From bec7cffa29313a62a6dae41123fa1271d290f6f5 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 18 Apr 2024 11:20:32 +0100 Subject: [PATCH 01/36] ensure we have latest gpt-4-turbo model costs --- gpt_engineer/core/token_usage.py | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/gpt_engineer/core/token_usage.py b/gpt_engineer/core/token_usage.py index eb06a537f9..f979e8f387 100644 --- a/gpt_engineer/core/token_usage.py +++ b/gpt_engineer/core/token_usage.py @@ -284,6 +284,7 @@ def usage_cost(self) -> float | None: result = 0 for log in self.log(): + if self.model_name == "gpt-3.5": result += get_openai_token_cost_for_model( self.model_name, log.total_prompt_tokens, is_completion=False ) diff --git a/pyproject.toml b/pyproject.toml index f3bf50b5a6..b2609b4835 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dataclasses-json = "0.5.7" tiktoken = ">=0.0.4" tabulate = "0.9.0" python-dotenv = ">=0.21.0" -langchain = "^0.1" +langchain = "^0.1.16" langchain_openai = "*" toml = ">=0.10.2" tomlkit = "^0.12.4" From cf689e7aec8087b15d53bcc5d4c030d49bc98636 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 18 Apr 2024 11:22:32 +0100 Subject: [PATCH 02/36] hmm --- gpt_engineer/core/token_usage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/gpt_engineer/core/token_usage.py b/gpt_engineer/core/token_usage.py index f979e8f387..eb06a537f9 100644 --- a/gpt_engineer/core/token_usage.py +++ b/gpt_engineer/core/token_usage.py @@ -284,7 +284,6 @@ def usage_cost(self) -> float | None: result = 0 for log in self.log(): - if self.model_name == "gpt-3.5": result += get_openai_token_cost_for_model( self.model_name, log.total_prompt_tokens, is_completion=False ) From d5d6104514be249d673ad050a703cd122eff7df6 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 6 May 2024 20:26:07 +0100 Subject: [PATCH 03/36] get started --- .gitignore | 2 + .../interactive_cli/examples/test1.py | 11 +++ .../interactive_cli/examples/test2.py | 27 +++++++ .../interactive_cli/examples/test3.py | 32 ++++++++ .../interactive_cli/examples/test4.py | 35 +++++++++ .../applications/interactive_cli/main.py | 78 +++++++++++++++++++ 6 files changed, 185 insertions(+) create mode 100644 gpt_engineer/applications/interactive_cli/examples/test1.py create mode 100644 gpt_engineer/applications/interactive_cli/examples/test2.py create mode 100644 gpt_engineer/applications/interactive_cli/examples/test3.py create mode 100644 gpt_engineer/applications/interactive_cli/examples/test4.py create mode 100644 gpt_engineer/applications/interactive_cli/main.py diff --git a/.gitignore b/.gitignore index 79745c28db..94000e076a 100644 --- a/.gitignore +++ b/.gitignore @@ -92,3 +92,5 @@ webapp/.next/ # locally saved datasets gpt_engineer/benchmark/benchmarks/apps/dataset gpt_engineer/benchmark/benchmarks/mbpp/dataset + +prompt \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/examples/test1.py b/gpt_engineer/applications/interactive_cli/examples/test1.py new file mode 100644 index 0000000000..08c8bb8a7a --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/examples/test1.py @@ -0,0 +1,11 @@ +from prompt_toolkit import prompt +from prompt_toolkit.completion import WordCompleter + +def main(): + branch_name_suggestion = 'feat/name' + print("Great, sounds like a useful feature.") + branch_name = prompt('Please confirm or edit the feature branch name: ', default=branch_name_suggestion) + print(f'Creating feature branch: {branch_name}') + +if __name__ == 
'__main__': + main() diff --git a/gpt_engineer/applications/interactive_cli/examples/test2.py b/gpt_engineer/applications/interactive_cli/examples/test2.py new file mode 100644 index 0000000000..2bab069145 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/examples/test2.py @@ -0,0 +1,27 @@ +from prompt_toolkit.shortcuts import radiolist_dialog + +def main(): + print("Use the arrow keys to navigate. Press Enter to select.") + tasks = [ + ('0', 'Generate Whole Feature'), + ('1', 'Task A - Create a view file for account page'), + ('2', 'Task B - Make an API call to retrieve account information'), + ('3', 'Enter a custom task') + ] + + result = radiolist_dialog( + title="Suggested tasks", + text="Select the task to start with, or enter a custom task:", + values=tasks + ).run() + + if result == '3': + from prompt_toolkit import prompt + custom_task = prompt('Enter your custom task description: ') + print(f"You entered a custom task: {custom_task}") + else: + task_description = next((desc for key, desc in tasks if key == result), None) + print(f"You selected: {task_description}") + +if __name__ == '__main__': + main() diff --git a/gpt_engineer/applications/interactive_cli/examples/test3.py b/gpt_engineer/applications/interactive_cli/examples/test3.py new file mode 100644 index 0000000000..8396c1e17e --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/examples/test3.py @@ -0,0 +1,32 @@ +from prompt_toolkit.shortcuts import radiolist_dialog + +def main(): + print("Diff generated. Please Review, and stage the changes you want to keep.") + + # Defining the options for the user with radiolist dialog + result = radiolist_dialog( + title="Diff Review Options", + text="Please select your action:", + values=[ + ('r', 'Retry'), + ('s', 'Stage changes and continue'), + ('c', 'Commit changes and continue'), + ('u', 'Undo') + ] + ).run() + + # Handle the user's choice + if result == 'r': + print("You have chosen to retry the diff generation.") + # Add logic to retry generating the diff + elif result == 's': + print("You have chosen to stage the changes.") + # Add logic to stage changes + elif result == 'c': + print("You have chosen to commit the changes.") + # Add logic to commit changes + else: + print("Operation cancelled.") + +if __name__ == '__main__': + main() diff --git a/gpt_engineer/applications/interactive_cli/examples/test4.py b/gpt_engineer/applications/interactive_cli/examples/test4.py new file mode 100644 index 0000000000..a9c9a9732a --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/examples/test4.py @@ -0,0 +1,35 @@ +from prompt_toolkit import PromptSession +from prompt_toolkit.completion import WordCompleter + +def main(): + print("Diff generated. 
Please Review, and stage the changes you want to keep.") + + # Define the options and create a completer with those options + options = {'r': 'Retry', 's': 'Stage changes and continue', 'c': 'Commit changes and continue', 'u': 'Undo'} + completer = WordCompleter(['r', 's', 'c', 'u'], ignore_case=True) + session = PromptSession() + + # Using prompt to get user input + result = session.prompt( + "Please select your action \n r: Retry \n s: Stage \n c: Commit \n u: Undo \n\n", + completer=completer + ).lower() + + # Handle the user's choice + if result == 'r': + print("You have chosen to retry the diff generation.") + # Add logic to retry generating the diff + elif result == 's': + print("You have chosen to stage the changes.") + # Add logic to stage changes + elif result == 'c': + print("You have chosen to commit the changes.") + # Add logic to commit changes + elif result == 'u': + print("Undo the last operation.") + # Add logic to undo the last operation + else: + print("Invalid option selected, please run the program again.") + +if __name__ == '__main__': + main() diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py new file mode 100644 index 0000000000..4e06656ecc --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -0,0 +1,78 @@ +import typer +from dotenv import load_dotenv + +import os +from prompt_toolkit import prompt +from prompt_toolkit.validation import Validator, ValidationError + +app = typer.Typer() + +class FeatureValidator(Validator): + def validate(self, document): + text = document.text + if not text: + raise ValidationError(message="Feature description cannot be empty", cursor_position=len(text)) + +def load_feature_description(feature_file_path): + """ + Load the feature description from a file or prompt the user if the file doesn't exist. + """ + if os.path.exists(feature_file_path): + with open(feature_file_path, 'r', encoding='utf-8') as file: + feature_description = file.read().strip() + else: + print(f"No feature file found at {feature_file_path}. Please enter the feature description:") + feature_description = prompt("Feature: ", validator=FeatureValidator()) + # todo: create feature text file containing the users feature + + + return feature_description + +@app.command() +def main( + project_path: str = typer.Argument(".", help="path"), + model: str = typer.Argument("gpt-4-turbo", help="model id string"), + temperature: float = typer.Option( + 0.1, + "--temperature", + "-t", + help="Controls randomness: lower values for more focused, deterministic outputs", + ), + azure_endpoint: str = typer.Option( + "", + "--azure", + "-a", + help="""Endpoint for your Azure OpenAI Service (https://xx.openai.azure.com). + In that case, the given model is the deployment name chosen in the Azure AI Studio.""", + ), + verbose: bool = typer.Option( + False, "--verbose", "-v", help="Enable verbose logging for debugging." + ), + debug: bool = typer.Option( + False, "--debug", "-d", help="Enable debug mode for debugging." + ), +): + """ + Run GPTE Interactive Improve + """ + + load_dotenv() + + # todo: check that git repo exists. If not - ask the user to create a git repository with a suitable git ignore which will be used to reduce ai usage + # todo: check that git repo is clean. If not - ask the user to stash or commit changes. 
+ + feature_description = load_feature_description(os.path.join(project_path, 'feature')) + + branch_name= 'feature/new' #todo: use ai to generate branch name suggestion + + print("Great, sounds like a useful feature.") + branch_name = prompt('Please confirm or edit the feature branch name: ', default=branch_name) + print(f'Creating feature branch: {branch_name}') + #todo: use gitpython to create new branch. + + # todo: continue with the rest of the task creation flow. Every time a task is added move it to + + + +if __name__ == "__main__": + app() \ No newline at end of file From 34b2f2135997dcade3d94082fe95c4d0940a01a7 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 6 May 2024 22:18:23 +0100 Subject: [PATCH 04/36] WIP --- .../{examples => cli code samples}/test1.py | 0 .../{examples => cli code samples}/test2.py | 0 .../{examples => cli code samples}/test3.py | 0 .../{examples => cli code samples}/test4.py | 0 .../interactive_cli/example_project/README.md | 19 +++++++ .../example_project/index.html | 43 ++++++++++++++++ .../example_project/styles.css | 49 +++++++++++++++++++ .../interactive_cli/generation_tools.py | 21 ++++++++ .../applications/interactive_cli/main.py | 35 ++++++++++--- 9 files changed, 159 insertions(+), 8 deletions(-) rename gpt_engineer/applications/interactive_cli/{examples => cli code samples}/test1.py (100%) rename gpt_engineer/applications/interactive_cli/{examples => cli code samples}/test2.py (100%) rename gpt_engineer/applications/interactive_cli/{examples => cli code samples}/test3.py (100%) rename gpt_engineer/applications/interactive_cli/{examples => cli code samples}/test4.py (100%) create mode 100644 gpt_engineer/applications/interactive_cli/example_project/README.md create mode 100644 gpt_engineer/applications/interactive_cli/example_project/index.html create mode 100644 gpt_engineer/applications/interactive_cli/example_project/styles.css create mode 100644 gpt_engineer/applications/interactive_cli/generation_tools.py diff --git a/gpt_engineer/applications/interactive_cli/examples/test1.py b/gpt_engineer/applications/interactive_cli/cli code samples/test1.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/examples/test1.py rename to gpt_engineer/applications/interactive_cli/cli code samples/test1.py diff --git a/gpt_engineer/applications/interactive_cli/examples/test2.py b/gpt_engineer/applications/interactive_cli/cli code samples/test2.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/examples/test2.py rename to gpt_engineer/applications/interactive_cli/cli code samples/test2.py diff --git a/gpt_engineer/applications/interactive_cli/examples/test3.py b/gpt_engineer/applications/interactive_cli/cli code samples/test3.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/examples/test3.py rename to gpt_engineer/applications/interactive_cli/cli code samples/test3.py diff --git a/gpt_engineer/applications/interactive_cli/examples/test4.py b/gpt_engineer/applications/interactive_cli/cli code samples/test4.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/examples/test4.py rename to gpt_engineer/applications/interactive_cli/cli code samples/test4.py diff --git a/gpt_engineer/applications/interactive_cli/example_project/README.md b/gpt_engineer/applications/interactive_cli/example_project/README.md new file mode 100644 index 0000000000..b55fe812a9 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/README.md @@ -0,0 +1,19 
@@
+# Local Bakery Website
+
+## Overview
+This project is the codebase for a static website for a Local Bakery. It's designed to provide essential information about the bakery, including an about us section, a showcase of products, and contact information.
+
+## Features
+- **About Us**: Learn more about the history and mission of the bakery.
+- **Products**: Browse the list of baked goods we offer.
+- **Contact**: Find contact details and how to reach out to us.
+
+## File Structure
+- `index.html`: The main HTML file that contains the structure of the website.
+- `styles.css`: CSS file for styling the website.
+
+## Getting Started
+To get a local copy up and running, follow these simple steps:
+
+### Prerequisites
+- Any modern web browser (e.g., Chrome, Firefox, Safari, or Edge).
\ No newline at end of file
diff --git a/gpt_engineer/applications/interactive_cli/example_project/index.html b/gpt_engineer/applications/interactive_cli/example_project/index.html
new file mode 100644
index 0000000000..17621e8048
--- /dev/null
+++ b/gpt_engineer/applications/interactive_cli/example_project/index.html
@@ -0,0 +1,43 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>Sweet Treats Bakery</title>
+    <link rel="stylesheet" href="styles.css">
+</head>
+<body>
+
+<header>
+    <h1>Welcome to Sweet Treats Bakery</h1>
+    <nav>
+        <ul>
+            <li><a href="#about">About Us</a></li>
+            <li><a href="#contact">Contact Us</a></li>
+        </ul>
+    </nav>
+</header>
+
+<section id="about">
+    <h2>About Us</h2>
+    <p>Family-owned bakery serving homemade treats since 1998. We pride ourselves on using local ingredients.</p>
+</section>
+
+<section id="contact">
+    <h2>Contact Us</h2>
+    <p>Visit us or send a message!</p>
+    <p>Address: 123 Baking St, Foodtown, TX</p>
+    <p>Email: contact@sweettreatsbakery.com</p>
+</section>
+
+<footer>
+    <p>Thank you for visiting our website! Follow us on social media for updates.</p>
+</footer>
+
+</body>
+</html>
+ + diff --git a/gpt_engineer/applications/interactive_cli/example_project/styles.css b/gpt_engineer/applications/interactive_cli/example_project/styles.css new file mode 100644 index 0000000000..ab9b9c97bc --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/styles.css @@ -0,0 +1,49 @@ +body { + font-family: 'Arial', sans-serif; + line-height: 1.6; + margin: 0; + padding: 0; + background: #f4f4f4; + color: #333; + display: flex; + flex-direction: column; +} + +header { + background: #c0392b; + color: #fff; + padding: 10px 20px; + text-align: center; +} + +header nav ul { + list-style: none; + padding: 0; +} + +header nav ul li { + display: inline; + margin-left: 10px; +} + +section { + margin: 20px; + padding: 20px; + background: #fff; +} + +footer { + text-align: center; + padding: 10px 20px; + background: #333; + color: #fff; +} + +a { + color: white; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py new file mode 100644 index 0000000000..fd9a210e3e --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -0,0 +1,21 @@ +from gpt_engineer.core.ai import AI +from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler + + +def generate_branch_name(ai: AI, feature_description: str) -> str: + system_prompt = """ + You are a branch name autocomplete / suggestion tool. Based on the users input, please respond with a single suggestion of a branch name and notthing else. + + Example: + + Input: I want to add a login button + Output: feature/login-button + """ + + ai.llm.callbacks.clear() # silent + + messages = ai.start(system_prompt, feature_description,step_name="name-branch") + + ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + + return messages[-1].content.strip() \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 4e06656ecc..e1240c27e7 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -5,6 +5,10 @@ from prompt_toolkit import prompt from prompt_toolkit.validation import Validator, ValidationError +from gpt_engineer.core.ai import AI + +from generation_tools import generate_branch_name + app = typer.Typer() class FeatureValidator(Validator): @@ -21,9 +25,15 @@ def load_feature_description(feature_file_path): with open(feature_file_path, 'r', encoding='utf-8') as file: feature_description = file.read().strip() else: - print(f"No feature file found at {feature_file_path}. Please enter the feature description:") - feature_description = prompt("Feature: ", validator=FeatureValidator()) - # todo: create feature text file containing the users feature + print(f"No file found at {feature_file_path}. Please describe the feature or change to work on:") + feature_description = prompt( + "", + multiline=True, + validator=FeatureValidator(), + bottom_toolbar="Press Ctrl+O to finish" + ) + with open(feature_file_path, 'w', encoding='utf-8') as file: + file.write(feature_description) return feature_description @@ -60,17 +70,26 @@ def main( # todo: check that git repo exists. If not - ask the user to create a git repository with a suitable git ignore which will be used to reduce ai usage # todo: check that git repo is clean. If not - ask the user to stash or commit changes. 
+ + ai = AI( + model_name=model, + temperature=temperature, + azure_endpoint=azure_endpoint, + ) feature_description = load_feature_description(os.path.join(project_path, 'feature')) - branch_name= 'feature/new' #todo: use ai to generate branch name suggestion + branch_name = generate_branch_name(ai, feature_description) + + print("\nFeature file created.\n ") - print("Great, sounds like a useful feature.") branch_name = prompt('Please confirm or edit the feature branch name: ', default=branch_name) - print(f'Creating feature branch: {branch_name}') - #todo: use gitpython to create new branch. - # todo: continue with the rest of the task creation flow. Every time a task is added move it to + # todo: use gitpython to create new branch. + + print(f'\nFeature branch created.\n') + + # todo: continue with the rest of the task creation flow. Every time a task is added move it to a task file From 6e3bbdcd2e8d9a0f054ab2806b236ea03a9ebec1 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Wed, 8 May 2024 16:29:16 +0100 Subject: [PATCH 05/36] guess so --- .../interactive_cli/example_project/.gitignore | 1 + .../interactive_cli/example_project/.ticket/feature | 1 + .../example_project/.ticket/progress.json | 9 +++++++++ .../interactive_cli/example_project/.ticket/task | 5 +++++ .../interactive_cli/example_project/index.html | 1 - 5 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 gpt_engineer/applications/interactive_cli/example_project/.gitignore create mode 100644 gpt_engineer/applications/interactive_cli/example_project/.ticket/feature create mode 100644 gpt_engineer/applications/interactive_cli/example_project/.ticket/progress.json create mode 100644 gpt_engineer/applications/interactive_cli/example_project/.ticket/task diff --git a/gpt_engineer/applications/interactive_cli/example_project/.gitignore b/gpt_engineer/applications/interactive_cli/example_project/.gitignore new file mode 100644 index 0000000000..2a8f8ca2dd --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/.gitignore @@ -0,0 +1 @@ +#.ticket - track this folder for testing diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/feature b/gpt_engineer/applications/interactive_cli/example_project/.ticket/feature new file mode 100644 index 0000000000..7e4abfae75 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/.ticket/feature @@ -0,0 +1 @@ +I want to create a feedback form on the website to collect user feedback \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/progress.json b/gpt_engineer/applications/interactive_cli/example_project/.ticket/progress.json new file mode 100644 index 0000000000..e257633cdd --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/.ticket/progress.json @@ -0,0 +1,9 @@ +{ + "todo": [ + "task 3" + ], + "done": [ + "task 1", + "task 2" + ] +} \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/task b/gpt_engineer/applications/interactive_cli/example_project/.ticket/task new file mode 100644 index 0000000000..f75456aaa6 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/.ticket/task @@ -0,0 +1,5 @@ +Create a local sql lite database for development. + +Initialize it with a feedback table. 
+ +The table can store a comment and a rating out of 5 \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/example_project/index.html b/gpt_engineer/applications/interactive_cli/example_project/index.html index 17621e8048..ef3a354dce 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/index.html +++ b/gpt_engineer/applications/interactive_cli/example_project/index.html @@ -26,7 +26,6 @@

About Us

From 4c7a9b9ff5e02c0b79498eb0c5dcf7de913db1c2 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 9 May 2024 21:19:33 +0100 Subject: [PATCH 06/36] wip --- .../example_project/.gitignore | 2 + .../example_project/.ticket/files | 20 ++++ .../interactive_cli/generation_tools.py | 40 +++++++ .../interactive_cli/git_context.py | 71 ++++++++++++ .../applications/interactive_cli/main.py | 90 ++++++++++----- .../applications/interactive_cli/ticket.py | 106 ++++++++++++++++++ 6 files changed, 303 insertions(+), 26 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli/example_project/.ticket/files create mode 100644 gpt_engineer/applications/interactive_cli/git_context.py create mode 100644 gpt_engineer/applications/interactive_cli/ticket.py diff --git a/gpt_engineer/applications/interactive_cli/example_project/.gitignore b/gpt_engineer/applications/interactive_cli/example_project/.gitignore index 2a8f8ca2dd..12ec290c57 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.gitignore +++ b/gpt_engineer/applications/interactive_cli/example_project/.gitignore @@ -1 +1,3 @@ #.ticket - track this folder for testing + +ignored_test \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/files b/gpt_engineer/applications/interactive_cli/example_project/.ticket/files new file mode 100644 index 0000000000..8686460030 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/.ticket/files @@ -0,0 +1,20 @@ +project/ +├── src/ +│ ├── main/ +│ │ ├── java/ +│ │ │ └── com/ +│ │ │ └── example/ +│ │ │ └── MyApp.java +│ │ └── resources/ +│ │ └── config.properties +│ └── test/ +│ ├── java/ +│ │ └── com/ +│ │ └── example/ +│ │ └── MyAppTest.java +│ └── resources/ +│ └── testdata.txt +├── lib/ +│ └── external-library.jar +└── README.md + diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index fd9a210e3e..45f23d4af0 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -18,4 +18,44 @@ def generate_branch_name(ai: AI, feature_description: str) -> str: ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + return messages[-1].content.strip() + + +def generate_suggested_tasks(ai: AI, input: str) -> str: + system_prompt = """ +You are a software engineer work planning tool. Given a feature description, a list of tasks already completed, and sections of the code +repository we are working on, suggest a list of tasks to be done in order move towards the end goal of completing the feature. + +First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what is left to be done. + +Then output the list of tasks to be done. Please try to keep the tasks small, actionable and independantly commitable. + +The output format will be XML as follows: + + + + + + + + + + + + + + + + + + +Respond in XML and nothing else. 
+""" + + ai.llm.callbacks.clear() # silent + + messages = ai.start(system_prompt, input,step_name="suggest-tasks") + + ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + return messages[-1].content.strip() \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/git_context.py b/gpt_engineer/applications/interactive_cli/git_context.py new file mode 100644 index 0000000000..afb7e0c451 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/git_context.py @@ -0,0 +1,71 @@ +from typing import List +from git import Repo, GitCommandError + +class Commit: + """ + Represents a single Git commit with a description and a diff. + """ + def __init__(self, description: str, diff: str): + self.description = description + self.diff = diff + + def __str__(self): + diff_str = "\n".join(str(d) for d in self.diff) + return f"Commit Description: {self.description}\nDiff:\n{diff_str}" + +class GitContext: + """ + Represents the Git context of a project directory, including a list of commits, + and staged and unstaged changes. + """ + def __init__(self, commits: List[Commit], staged_changes: str, unstaged_changes: str): + self.commits = commits + self.staged_changes = staged_changes + self.unstaged_changes = unstaged_changes + + @classmethod + def load_from_directory(cls, project_path: str) -> "GitContext": + """ + Load the Git context from the specified project directory using GitPython. + + Parameters + ---------- + project_path : str + The path to the project directory. + + Returns + ------- + GitContext + An instance of GitContext populated with commit details and changes. + """ + try: + # Initialize the repository object + repo = Repo(project_path) + assert not repo.bare # Ensure it is not a bare repository + + # Staged changes + staged_changes = repo.git.diff('--cached') + + # Unstaged changes + unstaged_changes = repo.git.diff() + + # Identify the current branch + current_branch = repo.active_branch + + commits = list(repo.iter_commits(rev=current_branch.name)) + + # Create Commit objects with descriptions and diffs + commit_objects = [ + Commit( + commit.summary, + commit.diff(commit.parents[0], create_patch=True) if commit.parents else commit.diff(None, create_patch=True) + ) + for commit in commits + ] + + return cls(commit_objects, staged_changes, unstaged_changes) + + except (AssertionError, GitCommandError, IndexError) as e: + print(f"Error accessing repository: {e}") + return cls([], '', '') + diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index e1240c27e7..c51f951ad4 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -1,13 +1,14 @@ import typer from dotenv import load_dotenv +from ticket import Ticket -import os from prompt_toolkit import prompt from prompt_toolkit.validation import Validator, ValidationError from gpt_engineer.core.ai import AI -from generation_tools import generate_branch_name +from generation_tools import generate_branch_name, generate_suggested_tasks +from git_context import GitContext app = typer.Typer() @@ -17,31 +18,67 @@ def validate(self, document): if not text: raise ValidationError(message="Feature description cannot be empty", cursor_position=len(text)) -def load_feature_description(feature_file_path): - """ - Load the feature description from a file or prompt the user if the file doesn't exist. 
- """ - if os.path.exists(feature_file_path): - with open(feature_file_path, 'r', encoding='utf-8') as file: - feature_description = file.read().strip() - else: - print(f"No file found at {feature_file_path}. Please describe the feature or change to work on:") - feature_description = prompt( - "", + +def initialize_new_feature(ai, ticket): + + ticket.clear_ticket() + + feature_description = prompt( + "Write feature description: ", multiline=True, validator=FeatureValidator(), bottom_toolbar="Press Ctrl+O to finish" ) - with open(feature_file_path, 'w', encoding='utf-8') as file: - file.write(feature_description) - + + ticket.feature = feature_description + ticket.save_feature() + + # print("\n Ticket files created at .ticket \n ") + + branch_name = generate_branch_name(ai, feature_description) + + branch_name = prompt('\nConfirm branch name: ', default=branch_name) + + # todo: use gitpython to create new branch. + + print(f'\nFeature branch created.\n') + + +def get_context_string(ticket, git_context, code): + input = f""" +## Feature +{ticket.feature} + +## Completed Tasks +{ticket.progress.done} + +## Git Context +### Commits +{git_context.commits} + +### Staged Changes +{git_context.staged_changes} + +## Current Codebase +{code} +""" + + +def choose_next_task(ai, ticket, context): + print(f"There are {len(ticket.progress.done)} tasks completed so far. What shall we do next?") + + suggested_tasks = generate_suggested_tasks + + - return feature_description @app.command() def main( project_path: str = typer.Argument(".", help="path"), model: str = typer.Argument("gpt-4-turbo", help="model id string"), + new: bool = typer.Option( + False, "--new", "-n", help="Initialize new feature." + ), temperature: float = typer.Option( 0.1, "--temperature", @@ -60,7 +97,7 @@ def main( ), debug: bool = typer.Option( False, "--debug", "-d", help="Enable debug mode for debugging." - ), + ) ): """ Run GPTE Interactive Improve @@ -76,18 +113,19 @@ def main( temperature=temperature, azure_endpoint=azure_endpoint, ) - - feature_description = load_feature_description(os.path.join(project_path, 'feature')) - - branch_name = generate_branch_name(ai, feature_description) - print("\nFeature file created.\n ") + ticket = Ticket.load_or_create_at_directory(project_path) - branch_name = prompt('Please confirm or edit the feature branch name: ', default=branch_name) + if new: + initialize_new_feature(ai, ticket) - # todo: use gitpython to create new branch. + git_context = GitContext.load_from_directory(project_path) - print(f'\nFeature branch created.\n') + print(git_context.staged_changes) + print(git_context.unstaged_changes) + for commit in git_context.commits: + print(commit) + print() # todo: continue with the rest of the task creation flow. Every time a task is added move it to a task file diff --git a/gpt_engineer/applications/interactive_cli/ticket.py b/gpt_engineer/applications/interactive_cli/ticket.py new file mode 100644 index 0000000000..eee2bb8322 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/ticket.py @@ -0,0 +1,106 @@ +import json +from pathlib import Path + +class Ticket: + """ + Represents a ticket with a feature, task, and progress. + """ + def __init__(self, feature: str, task: str, progress: dict): + self.feature = feature + self.task = task + self.progress = progress + + @classmethod + def load_or_create_at_directory(cls, project_path: str) -> "Ticket": + """ + Load the ticket data from a directory. 
+ """ + feature_path = Path(project_path) / ".ticket" / "feature" + task_path = Path(project_path) / ".ticket" / "task" + progress_path = Path(project_path) / ".ticket" / "progress.json" + + if not feature_path.exists(): + feature = "" + cls._save_feature() + else: + with open(feature_path, 'r', encoding='utf-8') as file: + feature = file.read().strip() + + if not task_path.exists(): + task = "" + cls._save_task() + else: + with open(task_path, 'r', encoding='utf-8') as file: + task = file.read().strip() + + if not progress_path.exists(): + progress = {"todo": [], "done": []} + cls._save_progress() + else: + with open(progress_path, 'r', encoding='utf-8') as file: + progress = json.load(file) + + return cls(feature, task, progress) + + + def clear_ticket(self): + """ + Clears the feature and task files and resets the progress.json file. + """ + self.feature = "" + self.task = "" + self.progress = {"todo": [], "done": []} + self._save() + + def update_feature(self, text: str): + """ + Updates the feature file with new text. + + Parameters + ---------- + text : str + The new text to write to the feature file. + """ + self.feature = text + self._save_feature() + + def update_task(self, text: str): + """ + Updates the task file with new text. + + Parameters + ---------- + text : str + The new text to write to the task file. + """ + self.task = text + self._save_task() + + def complete_task(self): + """ + Moves the current task to the 'done' list in the progress.json file and clears the task file. + """ + if self.task: + self.progress['done'].append(self.task) + self.task = "" + self._save() + + def _save(self): + """ + Helper method to save the feature, task, and progress to their respective files. + """ + self._save_feature() + self._save_task() + self._save_progress() + + def _save_feature(self): + with open(Path(self.project_path) / ".ticket" / "feature", 'w', encoding='utf-8') as file: + file.write(self.feature) + + def _save_task(self): + with open(Path(self.project_path) / ".ticket" / "task", 'w', encoding='utf-8') as file: + file.write(self.task) + + def _save_progress(self): + with open(Path(self.project_path) / ".ticket" / "progress.json", 'w', encoding='utf-8') as file: + json.dump(self.progress, file, indent=4) \ No newline at end of file From ca15c7a6d94dcbb4c15fca9ff7727bd625ac31c5 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 9 May 2024 22:35:08 +0100 Subject: [PATCH 07/36] ye --- .../cli-code_samples/edit_file_tree.py | 99 +++++++++++++++++++ .../test1.py | 0 .../test2.py | 0 .../test3.py | 0 .../test4.py | 0 .../interactive_cli/git_context.py | 71 ------------- .../applications/interactive_cli/main.py | 10 +- .../interactive_cli/repository.py | 68 +++++++++++++ 8 files changed, 174 insertions(+), 74 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py rename gpt_engineer/applications/interactive_cli/{cli code samples => cli-code_samples}/test1.py (100%) rename gpt_engineer/applications/interactive_cli/{cli code samples => cli-code_samples}/test2.py (100%) rename gpt_engineer/applications/interactive_cli/{cli code samples => cli-code_samples}/test3.py (100%) rename gpt_engineer/applications/interactive_cli/{cli code samples => cli-code_samples}/test4.py (100%) delete mode 100644 gpt_engineer/applications/interactive_cli/git_context.py create mode 100644 gpt_engineer/applications/interactive_cli/repository.py diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py 
b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py new file mode 100644 index 0000000000..924660b8ad --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py @@ -0,0 +1,99 @@ +from dataclasses import dataclass, field +from typing import Dict, Optional, List +from prompt_toolkit import PromptSession +from prompt_toolkit.application import Application +from prompt_toolkit.key_binding import KeyBindings +from prompt_toolkit.widgets import TextArea +from prompt_toolkit.layout.layout import Layout +from prompt_toolkit.layout.containers import HSplit +from prompt_toolkit.layout.controls import FormattedTextControl +from prompt_toolkit.buffer import Buffer + +def generate_file_tree(files): + """ + Generates a file tree from a list of file paths. + """ + tree = {} + for file in files: + parts = file.split('/') + node = tree + for part in parts: + node = node.setdefault(part, {}) + return tree + +def generate_tree_string(node, prefix=''): + """ + Recursively generates a string representation of the file tree. + """ + lines = [] + items = list(node.items()) + for i, (key, subnode) in enumerate(items): + connector = "└─" if i == len(items) - 1 else "├─" + if subnode is not None: # Check if it's a directory or a commented directory + lines.append(f"{prefix}{connector} {key}/") + if subnode: # Only append sub-tree if it's not commented out + extension = " " if i == len(items) - 1 else "│ " + lines.extend(generate_tree_string(subnode, prefix + extension)) + else: # it's a file or commented file + lines.append(f"{prefix}{connector} {key}") + return lines + +def get_editable_tree(files): + tree = generate_file_tree(files) + tree_lines = generate_tree_string(tree) + return '\n'.join(tree_lines) + +from prompt_toolkit.layout import Window + +def interactive_edit_files(files): + session = PromptSession() + + # Generate editable file tree + editable_tree = get_editable_tree(files) + + # Text area for file tree + text_area = TextArea(text=editable_tree, + scrollbar=True, + multiline=True, + wrap_lines=False) + + # Ensure the text area starts in insert mode + # text_area.buffer.cursor_position += len(text_area.text) + text_area.buffer.insert_mode = False + + # Instructions wrapped in a Window + instructions = Window(content=FormattedTextControl( + text='Please comment out unneeded files to reduce context overhead.\n' + 'You can comment out lines by adding "#" at the beginning of the line.\n' + 'Press Ctrl-S to save and exit.'), + height=3, # Adjust height as necessary + style='class:instruction') + + # Container that holds both the instructions and the text area + instruction_container = HSplit([instructions, text_area]) + + # Create a layout out of the widget above + layout = Layout(instruction_container) + + # Add key bindings for custom actions like save + bindings = KeyBindings() + + @bindings.add('c-s') + def _(event): + # Saving functionality or further processing can be implemented here + print('Saving and processing your tree...') + event.app.exit() + + app = Application(layout=layout, key_bindings=bindings, full_screen=True) + app.run() + +# Example usage +tracked_files = [ + "project/src/main/java/com/example/MyApp.java", + "project/src/main/resources/config.properties", + "project/src/test/java/com/example/MyAppTest.java", + "project/src/test/resources/testdata.txt", + "project/lib/external-library.jar", + "project/README.md" +] +interactive_edit_files(tracked_files) diff --git a/gpt_engineer/applications/interactive_cli/cli code 
samples/test1.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/cli code samples/test1.py rename to gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py diff --git a/gpt_engineer/applications/interactive_cli/cli code samples/test2.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/cli code samples/test2.py rename to gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py diff --git a/gpt_engineer/applications/interactive_cli/cli code samples/test3.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/cli code samples/test3.py rename to gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py diff --git a/gpt_engineer/applications/interactive_cli/cli code samples/test4.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/cli code samples/test4.py rename to gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py diff --git a/gpt_engineer/applications/interactive_cli/git_context.py b/gpt_engineer/applications/interactive_cli/git_context.py deleted file mode 100644 index afb7e0c451..0000000000 --- a/gpt_engineer/applications/interactive_cli/git_context.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import List -from git import Repo, GitCommandError - -class Commit: - """ - Represents a single Git commit with a description and a diff. - """ - def __init__(self, description: str, diff: str): - self.description = description - self.diff = diff - - def __str__(self): - diff_str = "\n".join(str(d) for d in self.diff) - return f"Commit Description: {self.description}\nDiff:\n{diff_str}" - -class GitContext: - """ - Represents the Git context of a project directory, including a list of commits, - and staged and unstaged changes. - """ - def __init__(self, commits: List[Commit], staged_changes: str, unstaged_changes: str): - self.commits = commits - self.staged_changes = staged_changes - self.unstaged_changes = unstaged_changes - - @classmethod - def load_from_directory(cls, project_path: str) -> "GitContext": - """ - Load the Git context from the specified project directory using GitPython. - - Parameters - ---------- - project_path : str - The path to the project directory. - - Returns - ------- - GitContext - An instance of GitContext populated with commit details and changes. 
- """ - try: - # Initialize the repository object - repo = Repo(project_path) - assert not repo.bare # Ensure it is not a bare repository - - # Staged changes - staged_changes = repo.git.diff('--cached') - - # Unstaged changes - unstaged_changes = repo.git.diff() - - # Identify the current branch - current_branch = repo.active_branch - - commits = list(repo.iter_commits(rev=current_branch.name)) - - # Create Commit objects with descriptions and diffs - commit_objects = [ - Commit( - commit.summary, - commit.diff(commit.parents[0], create_patch=True) if commit.parents else commit.diff(None, create_patch=True) - ) - for commit in commits - ] - - return cls(commit_objects, staged_changes, unstaged_changes) - - except (AssertionError, GitCommandError, IndexError) as e: - print(f"Error accessing repository: {e}") - return cls([], '', '') - diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index c51f951ad4..c67015dc72 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -8,7 +8,7 @@ from gpt_engineer.core.ai import AI from generation_tools import generate_branch_name, generate_suggested_tasks -from git_context import GitContext +from repository import Repository app = typer.Typer() @@ -43,6 +43,8 @@ def initialize_new_feature(ai, ticket): print(f'\nFeature branch created.\n') + + def get_context_string(ticket, git_context, code): input = f""" @@ -67,7 +69,7 @@ def get_context_string(ticket, git_context, code): def choose_next_task(ai, ticket, context): print(f"There are {len(ticket.progress.done)} tasks completed so far. What shall we do next?") - suggested_tasks = generate_suggested_tasks + suggested_tasks = generate_suggested_tasks() @@ -114,12 +116,14 @@ def main( azure_endpoint=azure_endpoint, ) + repository = Repository(project_path) + ticket = Ticket.load_or_create_at_directory(project_path) if new: initialize_new_feature(ai, ticket) - git_context = GitContext.load_from_directory(project_path) + git_context = repository.get_git_context() print(git_context.staged_changes) print(git_context.unstaged_changes) diff --git a/gpt_engineer/applications/interactive_cli/repository.py b/gpt_engineer/applications/interactive_cli/repository.py new file mode 100644 index 0000000000..96870ba856 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/repository.py @@ -0,0 +1,68 @@ +from dataclasses import dataclass +from typing import List + +from git import Repo, GitCommandError +import os + +@dataclass +class Commit: + """ + Represents a single Git commit with a description and a diff. + """ + description: str + diff: str + + def __str__(self) -> str: + diff_str = "\n".join(str(d) for d in self.diff) + return f"Commit Description: {self.description}\nDiff:\n{diff_str}" + + +@dataclass +class GitContext: + """ + Represents the Git context of an in progress feature. + """ + commits: List[Commit] + staged_changes: str + unstaged_changes: str + + + +class Repository: + """ + Manages a git repository, providing functionalities to get repo status, + list files considering .gitignore, and interact with repository history. + """ + def __init__(self, repo_path: str): + self.repo_path = repo_path + self.repo = Repo(repo_path) + assert not self.repo.bare + + def get_tracked_files(self) -> List[str]: + """ + List all files that are currently tracked by Git in the repository. 
+ """ + try: + tracked_files = self.repo.git.ls_files().split('\n') + return tracked_files + except GitCommandError as e: + print(f"Error listing tracked files: {e}") + return [] + + + def get_git_context(self): + staged_changes = self.repo.git.diff('--cached') + unstaged_changes = self.repo.git.diff() + current_branch = self.repo.active_branch + + commits = list(self.repo.iter_commits(rev=current_branch.name)) + + commit_objects = [ + Commit( + commit.summary, + commit.diff(commit.parents[0], create_patch=True) if commit.parents else commit.diff(None, create_patch=True) + ) + for commit in commits + ] + + return GitContext(commit_objects, staged_changes, unstaged_changes) \ No newline at end of file From 3b5a782f904d7d7124afda4489cd28881c2f6ff3 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Sun, 12 May 2024 23:22:07 +0100 Subject: [PATCH 08/36] rename files --- .../applications/cli/file_selector.py | 26 +-- .../applications/interactive_cli/agent.py | 94 ++++++++ .../cli-code_samples/edit_yaml_paths.py | 85 ++++++++ .../{.ticket/feature => .feature/description} | 0 .../example_project/.feature/files.yml | 7 + .../{.ticket => .feature}/progress.json | 0 .../{.ticket => .feature}/task | 0 .../example_project/.ticket/files | 20 -- .../example_project/active_files.yml | 10 + .../applications/interactive_cli/feature.py | 100 +++++++++ .../interactive_cli/file_selection.py | 201 ++++++++++++++++++ .../applications/interactive_cli/files.py | 25 +++ .../interactive_cli/generation_tools.py | 42 +++- .../applications/interactive_cli/main.py | 98 +++------ .../interactive_cli/repository.py | 37 +++- .../applications/interactive_cli/ticket.py | 106 --------- 16 files changed, 639 insertions(+), 212 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli/agent.py create mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py rename gpt_engineer/applications/interactive_cli/example_project/{.ticket/feature => .feature/description} (100%) create mode 100644 gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml rename gpt_engineer/applications/interactive_cli/example_project/{.ticket => .feature}/progress.json (100%) rename gpt_engineer/applications/interactive_cli/example_project/{.ticket => .feature}/task (100%) delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/.ticket/files create mode 100644 gpt_engineer/applications/interactive_cli/example_project/active_files.yml create mode 100644 gpt_engineer/applications/interactive_cli/feature.py create mode 100644 gpt_engineer/applications/interactive_cli/file_selection.py create mode 100644 gpt_engineer/applications/interactive_cli/files.py delete mode 100644 gpt_engineer/applications/interactive_cli/ticket.py diff --git a/gpt_engineer/applications/cli/file_selector.py b/gpt_engineer/applications/cli/file_selector.py index 08c5ddce69..076672d5e1 100644 --- a/gpt_engineer/applications/cli/file_selector.py +++ b/gpt_engineer/applications/cli/file_selector.py @@ -20,6 +20,7 @@ import fnmatch import os import subprocess +import platform from pathlib import Path from typing import Any, Dict, Generator, List, Union @@ -200,32 +201,23 @@ def open_with_default_editor(self, file_path: Union[str, Path]): The path to the file to be opened in the text editor. 
""" - editors = [ - "gedit", - "notepad", - "nvim", - "write", - "nano", - "vim", - "emacs", - ] # Putting the beginner-friendly text editor forward chosen_editor = os.environ.get("EDITOR") # Try the preferred editor first, then fallback to common editors if chosen_editor: try: - subprocess.run([chosen_editor, file_path]) + subprocess.run([chosen_editor, str(file_path)], check=True) return except Exception: pass - for editor in editors: - try: - subprocess.run([editor, file_path]) - return - except Exception: - continue - print("No suitable text editor found. Please edit the file manually.") + # Platform-specific methods to open the file + if platform.system() == 'Windows': + os.startfile(file_path) + elif platform.system() == 'Darwin': + subprocess.run(['open', file_path]) + else: # Linux and other Unix-like systems + subprocess.run(['xdg-open', file_path]) def is_utf8(self, file_path: Union[str, Path]) -> bool: """ diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py new file mode 100644 index 0000000000..523d6ce513 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -0,0 +1,94 @@ +from prompt_toolkit import prompt as cli_input +from prompt_toolkit.validation import Validator, ValidationError + +from gpt_engineer.core.ai import AI +from gpt_engineer.core.base_agent import BaseAgent +from gpt_engineer.core.preprompts_holder import PrepromptsHolder +from gpt_engineer.core.prompt import Prompt +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path +from gpt_engineer.core.default.steps import improve_fn + + +from feature import Feature +from repository import Repository, GitContext +from file_selection import FileSelection +from files import Files +from generation_tools import generate_branch_name + +class FeatureValidator(Validator): + def validate(self, document): + text = document.text + if not text: + raise ValidationError(message="Feature description cannot be empty", cursor_position=len(text)) + +class FeatureAgent(BaseAgent): + """ + A cli agent which implements a feature as a set of incremental tasks + """ + def __init__( + self, + project_path: str, + feature: Feature, + repository: Repository, + ai: AI = None, + ): + self.feature = feature + self.repository = repository + self.ai = ai or AI() + + self.file_selection = FileSelection(project_path, repository) + self.memory=DiskMemory(memory_path(project_path)), + self.preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) + + + def init(self): + feature_description = cli_input( + "Write feature description: ", + multiline=True, + validator=FeatureValidator(), + bottom_toolbar="Press Ctrl+O to finish" + ) + + self.feature.set_description(feature_description) + + # print("\n Ticket files created at .ticket \n ") + + branch_name = generate_branch_name(self.ai, feature_description) + + branch_name = cli_input('\nConfirm branch name: ', default=branch_name) + + # todo: use gitpython to create new branch. + + print(f'\nFeature branch created.\n') + + self.file_selection.update_yaml_from_tracked_files() + self.file_selection.open_yaml_in_editor() + input("Please edit the YAML file and then press Enter to continue...") + + self.resume() + + + def resume(self): + + git_context = self.repository.get_git_context() + + if git_context.unstaged_changes: + if input("Unstaged changes present are you sure you want to proceed? 
y/n").lower() not in ["", "y", "yes"]: + print("Ok, not proceeding.") + return + + self.file_selection.update_yaml_from_tracked_files() + + context_string = "get context string " + + files = Files(self.project_path, self.file_selection.get_from_yaml()) + + task = "get_task" + + prompt = Prompt(task) + + improve_fn(files, prompt, files, self.memory, None, context_string) + + + diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py new file mode 100644 index 0000000000..048c58d6df --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py @@ -0,0 +1,85 @@ +import yaml +from pathlib import Path +from prompt_toolkit import Application +from prompt_toolkit.key_binding import KeyBindings +from prompt_toolkit.widgets import TextArea +from prompt_toolkit.layout import Layout +from prompt_toolkit.widgets import Label +from prompt_toolkit.layout.containers import HSplit + +def create_yaml_file(file_paths): + """Generates a YAML structure from a list of file paths.""" + root = {} + for file_path in file_paths: + parts = Path(file_path).parts + current = root + for part in parts: + current = current.setdefault(part, {}) + return yaml.dump(root, sort_keys=False) + +def edit_yaml(yaml_content): + """Opens a Prompt Toolkit session to edit the YAML content.""" + kb = KeyBindings() + + @kb.add('c-q') + def exit_(event): + " Press Control-Q to exit. " + event.app.exit() + + @kb.add('c-c') + def exit_(event): + " Press Control-Q to exit. " + event.app.exit() + + @kb.add('c-s') + def save_exit(event): + " Press Control-S to save and exit. " + with open('edited_yaml.yaml', 'w') as f: + f.write(text_area.text) + print("File saved as 'edited_yaml.yaml'") + event.app.exit() + + @kb.add('c-t') + def comment_uncomment(event): + """Toggle comment on the current line with Ctrl-T.""" + tb = text_area.buffer + doc = tb.document + cursor_line_num = doc.cursor_position_row + line_text = doc.current_line_before_cursor + doc.current_line_after_cursor + if line_text.strip().startswith('#'): + tb.delete_before_cursor(len(line_text) - len(line_text.lstrip('#'))) + else: + tb.insert_text('#', move_cursor=False) + + text_area = TextArea( + text=yaml_content, + scrollbar=True, + multiline=True, + wrap_lines=False, + line_numbers=True + ) + + # Instruction label + instructions = Label(text="Use Ctrl-S to save and exit, Ctrl-Q to quit without saving, Ctrl-T to toggle comment.", + dont_extend_height=True) + + # Combine text area and instructions label in a vertical layout + layout = Layout(HSplit([text_area, instructions])) + + app = Application(layout=layout, key_bindings=kb, full_screen=False) + app.run() + +def main(file_paths): + """Generate a YAML file from file paths and open it for editing.""" + yaml_data = create_yaml_file(file_paths) + + edit_yaml(yaml_data) + +# Example usage: +file_paths = [ + '/path/to/file1.txt', + '/path/to/file2.txt', + '/path/to/dir/file3.txt' +] + +main(file_paths) diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/feature b/gpt_engineer/applications/interactive_cli/example_project/.feature/description similarity index 100% rename from gpt_engineer/applications/interactive_cli/example_project/.ticket/feature rename to gpt_engineer/applications/interactive_cli/example_project/.feature/description diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml 
b/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml new file mode 100644 index 0000000000..9a3f1ddc74 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml @@ -0,0 +1,7 @@ +# Complete list of files shared with the AI +# Please comment out any files not needed as context for this change +# This saves money and avoids overwhelming the AI +- .gitignore +#- 'README.md' +- index.html +- styles.css diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/progress.json b/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json similarity index 100% rename from gpt_engineer/applications/interactive_cli/example_project/.ticket/progress.json rename to gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/task b/gpt_engineer/applications/interactive_cli/example_project/.feature/task similarity index 100% rename from gpt_engineer/applications/interactive_cli/example_project/.ticket/task rename to gpt_engineer/applications/interactive_cli/example_project/.feature/task diff --git a/gpt_engineer/applications/interactive_cli/example_project/.ticket/files b/gpt_engineer/applications/interactive_cli/example_project/.ticket/files deleted file mode 100644 index 8686460030..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/.ticket/files +++ /dev/null @@ -1,20 +0,0 @@ -project/ -├── src/ -│ ├── main/ -│ │ ├── java/ -│ │ │ └── com/ -│ │ │ └── example/ -│ │ │ └── MyApp.java -│ │ └── resources/ -│ │ └── config.properties -│ └── test/ -│ ├── java/ -│ │ └── com/ -│ │ └── example/ -│ │ └── MyAppTest.java -│ └── resources/ -│ └── testdata.txt -├── lib/ -│ └── external-library.jar -└── README.md - diff --git a/gpt_engineer/applications/interactive_cli/example_project/active_files.yml b/gpt_engineer/applications/interactive_cli/example_project/active_files.yml new file mode 100644 index 0000000000..1416b31250 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/example_project/active_files.yml @@ -0,0 +1,10 @@ +# Please comment out files you want to exclude from the AI context (this saves money and can avoid overwhelming the AI) +- .ticket/: + - feature + - files.yml + - progress.json + - task +- .gitignore +- README.md +- index.html +- styles.css diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py new file mode 100644 index 0000000000..6fc3995565 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -0,0 +1,100 @@ +import json +from pathlib import Path +from typing import Union + +from gpt_engineer.core.default.disk_memory import DiskMemory + +class Feature(DiskMemory): + """ + Represents a ticket which will be developed incrementally, + + Includes with a feature (overal description of the change), + a task (current incremental work item), + and progress (history of incremental work completed) + """ + def __init__(self, project_path: Union[str, Path]): + super().__init__(project_path / ".feature") + + self.set_description("") + self.set_task("") + super()["progress.json"] = {"done": []} + + def get_description(self) -> str: + """ + Retrieve the content of the feature file in the database. + + Returns + ------- + str + The content of the feature file. 
+ """ + return super()["description"] + + def set_description(self, feature_description: str): + """ + Updates the feature file with new text. + + Parameters + ---------- + feature_description : str + The new feature_description to write to the feature file. + """ + super()["description"] = feature_description + + def get_progress(self) -> dict: + """ + Retrieve the progress object. + + Returns + ------- + str + The content of the feature file. + """ + return json.load(super()["progress.json"]) + + def update_progress(self, task: str): + """ + Updates the progress with a new completed task. + + Parameters + ---------- + feature_description : str + The new feature_description to write to the feature file. + """ + progress= self.get_progress() + new_progress = progress['done'].append(task) + super()["progress.json"] = json.dumps(new_progress, indent=4) + + def set_task(self, task: str): + """ + Updates the task file with new text. + + Parameters + ---------- + task : str + The new task to write to the feature file. + """ + super()["task"] = task + + def get_task(self) -> str: + """ + Retrieve the content of the feature file in the database. + + Returns + ------- + str + The content of the feature file. + """ + return super()["task"] + + + def complete_task(self): + """ + Moves the current task to the 'done' list in the progress.json file and clears the task file. + """ + task = self.get_task() + + if task: + self.update_progress(task) + self.set_task("") + diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py new file mode 100644 index 0000000000..eb58d2b5be --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -0,0 +1,201 @@ +import os +import subprocess +import yaml +import platform +import yaml +from typing import List, Tuple + + +class FileSelection: + """ + Manages the active files in a project directory and creates a YAML file listing them. 
+ """ + def __init__(self, project_path: str, repository): + self.repository = repository + self.yaml_path = os.path.join(project_path, '.ticket', 'files.yml') + self._initialize() + + def _create_nested_structure_from_file_paths(self, files_paths): + files_paths.sort() + file_structure = [] + for filepath in files_paths: + parts = filepath.split('/') + # Filter out the '.ticket' directory from paths + if '.ticket' in parts or '.feature' in parts: + continue + node = file_structure + for i, part in enumerate(parts[:-1]): + # Append '/' to part if it's a directory + directory = part if part.endswith('/') else part + '/' + found = False + for item in node: + if isinstance(item, dict) and directory in item: + node = item[directory] + found = True + break + if not found: + new_node = [] + # Insert directory at the correct position (before any file) + index = next((idx for idx, item in enumerate(node) if isinstance(item, str)), len(node)) + node.insert(index, {directory: new_node}) + node = new_node + # Add the file to the last node, ensuring directories are listed first + if not parts[-1].endswith('/'): + node.append(parts[-1]) + + return file_structure + + + def _write_yaml_with_comments(self, yaml_content): + with open(self.yaml_path, 'w') as file: + file.write(f"""# Complete list of files shared with the AI +# Please comment out any files not needed as context for this change +# This saves money and avoids overwhelming the AI +{yaml_content}""") + + def _initialize(self): + """ + Generates a YAML file from the tracked files if one doesnt exist + """ + + if os.path.exists(self.yaml_path): + return + + print("YAML file is missing or empty, generating YAML...") + + file_structure = self._create_nested_structure_from_file_paths(self.repository.get_tracked_files()) + + self._write_yaml_with_comments( + yaml.safe_dump(file_structure, default_flow_style=False, sort_keys=False, indent=2) + ) + + + def _get_from_yaml(self) -> Tuple[List[str], List[str]]: + with open(self.yaml_path, 'r') as file: + original_content = file.readlines()[3:] # Skip the 3 instruction lines + + # Create a version of the content with all lines uncommented + uncommented_content = ''.join(line.lstrip('# ') for line in original_content) + + # Load the original and uncommented content as YAML + original_structure = yaml.safe_load(''.join(original_content)) + uncommented_structure = yaml.safe_load(uncommented_content) + + def recurse_items(items, path=""): + paths = [] + if isinstance(items, dict): + for key, value in items.items(): + new_path = os.path.join(path, key) + paths.extend(recurse_items(value, new_path)) + elif isinstance(items, list): + for item in items: + if isinstance(item, dict): + paths.extend(recurse_items(item, path)) + else: + paths.append(os.path.join(path, item)) + else: + paths.append(path) + return paths + + original_paths = recurse_items(original_structure) + uncommented_paths = recurse_items(uncommented_structure) + + # Determine excluded files by finding the difference + excluded_files = list(set(uncommented_paths) - set(original_paths)) + + return (original_paths, excluded_files) + + def _set_to_yaml(self, selected_files, excluded_files): + + # Dont worry about commenting lines if they are no excluded files + if not excluded_files: + file_structure = self._create_nested_structure_from_file_paths(selected_files) + + self._write_yaml_with_comments( + yaml.safe_dump(file_structure, default_flow_style=False, sort_keys=False, indent=2) + ) + + return + + all_files = list(selected_files) + 
list(excluded_files) + + current_structure = self._create_nested_structure_from_file_paths(all_files) + + # Add a # in front of files which are excluded. This is a marker for us to go back and properly comment them out + def mark_excluded_files(structure, prefix=""): + for i, item in enumerate(structure): + if isinstance(item, dict): + for key, value in item.items(): + mark_excluded_files(value, prefix + key) + else: + full_path = prefix + item + if full_path in excluded_files: + structure[i] = f"#{item}" + + mark_excluded_files(current_structure) + + # Find all files marked for commenting - add comment and remove the mark. + def comment_marked_files(yaml_content): + lines = yaml_content.split('\n') + + updated_lines = [] + for line in lines: + if '#' in line: + line = '#' + line.replace('#', '').strip() + updated_lines.append(line) + + return '\n'.join(updated_lines) + + content = yaml.safe_dump(current_structure, default_flow_style=False, sort_keys=False, indent=2) + + updated_content = comment_marked_files(content) + + self._write_yaml_with_comments(updated_content) + + return + + + def update_yaml_from_tracked_files(self): + """ + Updates the YAML file with the current list of tracked files. + """ + + tracked_files = self.repository.get_tracked_files() + + selected_files, excluded_files = self._get_from_yaml() + + print(set(selected_files + excluded_files)) + print(set(tracked_files)) + + # If there are no changes, do nothing + if set(tracked_files) == set(selected_files + excluded_files): + print('yep') + return + + new_selected_files = list(set(tracked_files) - set(excluded_files)) + + self._set_to_yaml(new_selected_files, excluded_files) + + def get_from_yaml(self): + """ + Get selected file paths from yaml + """ + + selected_files, excluded_files = self._get_from_yaml() + + return selected_files + + + def open_yaml_in_editor(self): + """ + Opens the generated YAML file in the default system editor. + If the YAML file is empty or doesn't exist, generate it first. + """ + + # Platform-specific methods to open the file + if platform.system() == 'Windows': + os.startfile(self.yaml_path) + elif platform.system() == 'Darwin': + subprocess.run(['open', self.yaml_path]) + else: # Linux and other Unix-like systems + subprocess.run(['xdg-open', self.yaml_path]) diff --git a/gpt_engineer/applications/interactive_cli/files.py b/gpt_engineer/applications/interactive_cli/files.py new file mode 100644 index 0000000000..ee1e051986 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/files.py @@ -0,0 +1,25 @@ +from gpt_engineer.core.files_dict import FilesDict +from pathlib import Path + +class Files(FilesDict): + def __init__(self, project_path: str, selected_files: list): + """ + Initialize the Files object by reading the content of the provided file paths. + + Parameters + ---------- + project_path : str + The base path of the project. + selected_files : list + List of file paths relative to the project path. 
+ """ + content_dict = {} + for file_path in selected_files: + try: + with open(Path(project_path) / file_path, "r", encoding="utf-8") as content: + content_dict[str(file_path)] = content.read() + except FileNotFoundError: + print(f"Warning: File not found {file_path}") + except UnicodeDecodeError: + print(f"Warning: File not UTF-8 encoded {file_path}, skipping") + super().__init__(content_dict) \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index 45f23d4af0..d55ae366fe 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -1,5 +1,7 @@ from gpt_engineer.core.ai import AI from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler +import xml.etree.ElementTree as ET + def generate_branch_name(ai: AI, feature_description: str) -> str: @@ -21,10 +23,39 @@ def generate_branch_name(ai: AI, feature_description: str) -> str: return messages[-1].content.strip() +class TaskResponse: + def __init__(self, planning_thoughts, tasks, closing_remarks): + self.planning_thoughts = planning_thoughts + self.tasks = tasks + self.closing_remarks = closing_remarks + + def __str__(self): + return f"Planning Thoughts: {self.planning_thoughts}\nTasks: {'; '.join(self.tasks)}\nClosing Remarks: {self.closing_remarks}" + + +def parse_task_xml_to_class(xml_data): + # Parse the XML data + root = ET.fromstring(xml_data) + + # Extract the planning thoughts + planning_thoughts = root.find('PlanningThoughts').text.strip() + + # Extract tasks + tasks = [task.text.strip() for task in root.findall('.//Task')] + + # Extract closing remarks + closing_remarks = root.find('ClosingRemarks').text.strip() + + # Create an instance of the response class + response = TaskResponse(planning_thoughts, tasks, closing_remarks) + + return response + + def generate_suggested_tasks(ai: AI, input: str) -> str: system_prompt = """ You are a software engineer work planning tool. Given a feature description, a list of tasks already completed, and sections of the code -repository we are working on, suggest a list of tasks to be done in order move towards the end goal of completing the feature. +repository we are working on, suggest a list of tasks to be done in order to move towards the end goal of completing the feature. First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what is left to be done. @@ -52,10 +83,13 @@ def generate_suggested_tasks(ai: AI, input: str) -> str: Respond in XML and nothing else. 
""" - ai.llm.callbacks.clear() # silent + # ai.llm.callbacks.clear() # silent messages = ai.start(system_prompt, input,step_name="suggest-tasks") - ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + # ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) - return messages[-1].content.strip() \ No newline at end of file + xml = messages[-1].content.strip() + + return parse_task_xml_to_class(xml).tasks + diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index c67015dc72..389ecfc0db 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -6,72 +6,51 @@ from prompt_toolkit.validation import Validator, ValidationError from gpt_engineer.core.ai import AI +from gpt_engineer.core.prompt import Prompt -from generation_tools import generate_branch_name, generate_suggested_tasks -from repository import Repository +from generation_tools import generate_suggested_tasks +from repository import Repository, GitContext +from file_selection import FileSelection +from files import Files +from agent import FeatureAgent +from feature import Feature app = typer.Typer() -class FeatureValidator(Validator): - def validate(self, document): - text = document.text - if not text: - raise ValidationError(message="Feature description cannot be empty", cursor_position=len(text)) +def get_contenxt_string(feature:Feature ,git_context:GitContext): + return f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothign more. + +The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. -def initialize_new_feature(ai, ticket): - - ticket.clear_ticket() - - feature_description = prompt( - "Write feature description: ", - multiline=True, - validator=FeatureValidator(), - bottom_toolbar="Press Ctrl+O to finish" - ) - - ticket.feature = feature_description - ticket.save_feature() - - # print("\n Ticket files created at .ticket \n ") - - branch_name = generate_branch_name(ai, feature_description) - - branch_name = prompt('\nConfirm branch name: ', default=branch_name) - - # todo: use gitpython to create new branch. - - print(f'\nFeature branch created.\n') - - - - -def get_context_string(ticket, git_context, code): - input = f""" -## Feature -{ticket.feature} +## Feature - this is the description fo the current feature we are working on. +{feature.get_description()} -## Completed Tasks -{ticket.progress.done} +## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. +{feature.get_progress()["done"]} -## Git Context -### Commits -{git_context.commits} +## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. +### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. +{git_context.branch_changes} -### Staged Changes +### Staged Changes - this is the diff of the current staged changes. {git_context.staged_changes} +""" + +def get_full_context_string(ticket, git_context, files: Files): + return f"""{get_contenxt_string(ticket, git_context)} -## Current Codebase -{code} +## Current Codebase - this is the as is view of the current code base including any unstaged changes. 
+{files.to_chat()} """ -def choose_next_task(ai, ticket, context): +def choose_next_task(ai, ticket, git_context: GitContext, files: Files): print(f"There are {len(ticket.progress.done)} tasks completed so far. What shall we do next?") - suggested_tasks = generate_suggested_tasks() - + context_string = get_full_context_string(ticket, git_context, files) + suggested_tasks_xml = generate_suggested_tasks() @app.command() @@ -118,22 +97,15 @@ def main( repository = Repository(project_path) - ticket = Ticket.load_or_create_at_directory(project_path) - - if new: - initialize_new_feature(ai, ticket) - - git_context = repository.get_git_context() - - print(git_context.staged_changes) - print(git_context.unstaged_changes) - for commit in git_context.commits: - print(commit) - print() - - # todo: continue with the rest of the task creation flow. Every time a task is added move it to a task file + feature = Feature(project_path) + agent = FeatureAgent(project_path, feature, repository, ai) + if new: + agent.init() + else: + agent.resume() + if __name__ == "__main__": app() \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/repository.py b/gpt_engineer/applications/interactive_cli/repository.py index 96870ba856..81d50e00c2 100644 --- a/gpt_engineer/applications/interactive_cli/repository.py +++ b/gpt_engineer/applications/interactive_cli/repository.py @@ -23,9 +23,10 @@ class GitContext: Represents the Git context of an in progress feature. """ commits: List[Commit] + branch_changes: str staged_changes: str unstaged_changes: str - + tracked_files: List[str] class Repository: @@ -48,6 +49,34 @@ def get_tracked_files(self) -> List[str]: except GitCommandError as e: print(f"Error listing tracked files: {e}") return [] + + def get_feature_branch_diff(self): + """ + Get a consolidated diff for the entire feature branch from its divergence point. + + Returns: + - str: The diff representing all changes from the feature branch since its divergence. 
+ """ + current_branch = self.repo.active_branch + + # Get the tracking branch (e.g., 'origin/master') + tracking_branch = current_branch.tracking_branch() + if tracking_branch is None: + print("No tracking branch set, using 'master' as default base branch.") + tracking_branch = self.repo.heads.master # Fallback to 'master' + + try: + # Find the merge base between the current branch and the tracking branch or master + merge_base = self.repo.merge_base(tracking_branch, current_branch) + if merge_base: + merge_base = merge_base[0] # GitPython might return a list of merge bases + + # Generate the diff from the merge base to the latest commit of the feature branch + feature_diff = self.repo.git.diff(f"{merge_base}..{current_branch}", unified=0) + return feature_diff + except GitCommandError as e: + print(f"Error generating diff: {e}") + return "" def get_git_context(self): @@ -65,4 +94,8 @@ def get_git_context(self): for commit in commits ] - return GitContext(commit_objects, staged_changes, unstaged_changes) \ No newline at end of file + branch_changes = self.get_feature_branch_diff() + + tracked_files = self.get_tracked_files() + + return GitContext(commit_objects, branch_changes, staged_changes, unstaged_changes, tracked_files) \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/ticket.py b/gpt_engineer/applications/interactive_cli/ticket.py deleted file mode 100644 index eee2bb8322..0000000000 --- a/gpt_engineer/applications/interactive_cli/ticket.py +++ /dev/null @@ -1,106 +0,0 @@ -import json -from pathlib import Path - -class Ticket: - """ - Represents a ticket with a feature, task, and progress. - """ - def __init__(self, feature: str, task: str, progress: dict): - self.feature = feature - self.task = task - self.progress = progress - - @classmethod - def load_or_create_at_directory(cls, project_path: str) -> "Ticket": - """ - Load the ticket data from a directory. - """ - feature_path = Path(project_path) / ".ticket" / "feature" - task_path = Path(project_path) / ".ticket" / "task" - progress_path = Path(project_path) / ".ticket" / "progress.json" - - if not feature_path.exists(): - feature = "" - cls._save_feature() - else: - with open(feature_path, 'r', encoding='utf-8') as file: - feature = file.read().strip() - - if not task_path.exists(): - task = "" - cls._save_task() - else: - with open(task_path, 'r', encoding='utf-8') as file: - task = file.read().strip() - - if not progress_path.exists(): - progress = {"todo": [], "done": []} - cls._save_progress() - else: - with open(progress_path, 'r', encoding='utf-8') as file: - progress = json.load(file) - - return cls(feature, task, progress) - - - def clear_ticket(self): - """ - Clears the feature and task files and resets the progress.json file. - """ - self.feature = "" - self.task = "" - self.progress = {"todo": [], "done": []} - self._save() - - def update_feature(self, text: str): - """ - Updates the feature file with new text. - - Parameters - ---------- - text : str - The new text to write to the feature file. - """ - self.feature = text - self._save_feature() - - def update_task(self, text: str): - """ - Updates the task file with new text. - - Parameters - ---------- - text : str - The new text to write to the task file. - """ - self.task = text - self._save_task() - - def complete_task(self): - """ - Moves the current task to the 'done' list in the progress.json file and clears the task file. 
- """ - if self.task: - self.progress['done'].append(self.task) - self.task = "" - self._save() - - def _save(self): - """ - Helper method to save the feature, task, and progress to their respective files. - """ - self._save_feature() - self._save_task() - self._save_progress() - - def _save_feature(self): - with open(Path(self.project_path) / ".ticket" / "feature", 'w', encoding='utf-8') as file: - file.write(self.feature) - - def _save_task(self): - with open(Path(self.project_path) / ".ticket" / "task", 'w', encoding='utf-8') as file: - file.write(self.task) - - def _save_progress(self): - with open(Path(self.project_path) / ".ticket" / "progress.json", 'w', encoding='utf-8') as file: - json.dump(self.progress, file, indent=4) \ No newline at end of file From 6c8e9d8bf8a828560f6ed9566e930c759a1e7580 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Sun, 12 May 2024 23:23:48 +0100 Subject: [PATCH 09/36] add steps --- .../interactive_cli/example_project/active_files.yml | 10 ---------- gpt_engineer/core/default/steps.py | 10 ++++++++-- 2 files changed, 8 insertions(+), 12 deletions(-) delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/active_files.yml diff --git a/gpt_engineer/applications/interactive_cli/example_project/active_files.yml b/gpt_engineer/applications/interactive_cli/example_project/active_files.yml deleted file mode 100644 index 1416b31250..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/active_files.yml +++ /dev/null @@ -1,10 +0,0 @@ -# Please comment out files you want to exclude from the AI context (this saves money and can avoid overwhelming the AI) -- .ticket/: - - feature - - files.yml - - progress.json - - task -- .gitignore -- README.md -- index.html -- styles.css diff --git a/gpt_engineer/core/default/steps.py b/gpt_engineer/core/default/steps.py index eb0f9e2bb4..6051358b24 100644 --- a/gpt_engineer/core/default/steps.py +++ b/gpt_engineer/core/default/steps.py @@ -37,7 +37,7 @@ import traceback from pathlib import Path -from typing import List, MutableMapping, Union +from typing import List, MutableMapping, Union, Optional from langchain.schema import HumanMessage, SystemMessage from termcolor import colored @@ -274,6 +274,7 @@ def improve_fn( files_dict: FilesDict, memory: BaseMemory, preprompts_holder: PrepromptsHolder, + additional_context: Optional[str] = None, ) -> FilesDict: """ Improves the code based on user input and returns the updated files. @@ -290,6 +291,8 @@ def improve_fn( The memory interface where the code and related data are stored. preprompts_holder : PrepromptsHolder The holder for preprompt messages that guide the AI model. 
+ additional_context :str + Optional additional context to provide to the AI as part of the request Returns ------- @@ -300,7 +303,10 @@ def improve_fn( messages = [ SystemMessage(content=setup_sys_prompt_existing_code(preprompts)), ] - # Add files as input + + if additional_context: + messages.append(HumanMessage(content=additional_context)) + messages.append(HumanMessage(content=f"{files_dict.to_chat()}")) messages.append(HumanMessage(content=prompt.to_langchain_content())) memory.log( From 8d9850c366c61c8d0f4adcf3ecef511e4acb19b8 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 13 May 2024 00:14:50 +0100 Subject: [PATCH 10/36] improve runs --- .../applications/interactive_cli/agent.py | 48 +++++++++++++++++-- .../example_project/.feature/progress.json | 3 -- .../example_project/.gitignore | 5 +- .../applications/interactive_cli/feature.py | 18 +++---- .../interactive_cli/file_selection.py | 48 ++++++++++++++++++- .../applications/interactive_cli/main.py | 1 - gpt_engineer/core/default/disk_memory.py | 3 ++ 7 files changed, 107 insertions(+), 19 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index 523d6ce513..8aeaafbf69 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -3,6 +3,7 @@ from gpt_engineer.core.ai import AI from gpt_engineer.core.base_agent import BaseAgent +from gpt_engineer.core.files_dict import FilesDict from gpt_engineer.core.preprompts_holder import PrepromptsHolder from gpt_engineer.core.prompt import Prompt from gpt_engineer.core.default.disk_memory import DiskMemory @@ -33,16 +34,20 @@ def __init__( repository: Repository, ai: AI = None, ): + self.project_path = project_path self.feature = feature self.repository = repository self.ai = ai or AI() self.file_selection = FileSelection(project_path, repository) - self.memory=DiskMemory(memory_path(project_path)), + self.memory=DiskMemory(memory_path(project_path)) self.preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) def init(self): + + self.feature.clear_feature() + feature_description = cli_input( "Write feature description: ", multiline=True, @@ -68,6 +73,8 @@ def init(self): self.resume() + def improve(self): + self.resume() def resume(self): @@ -80,15 +87,48 @@ def resume(self): self.file_selection.update_yaml_from_tracked_files() - context_string = "get context string " + context_string = self.get_contenxt_string(self.feature, git_context) files = Files(self.project_path, self.file_selection.get_from_yaml()) - task = "get_task" + feature = self.feature.get_description() + file_string = self.file_selection.get_pretty_from_yaml() + task = self.feature.get_task() + + + # list feature and task + print(f"Feature: {feature}\n\n") + print(f"Files: {file_string}\n\n") + print(f"Task: {task}\n\n") + + # do you want to attempt this task? + if cli_input("Do you want to attempt this task? y/n: ", default='y').lower() not in ["y", "yes"]: + print("Ok, not proceeding. Perhaps you should update the feature and retry") + return + # if no: do you want to edit feature? edit task? complete? or cancel? prompt = Prompt(task) - improve_fn(files, prompt, files, self.memory, None, context_string) + improve_fn(self.ai, prompt, files, self.memory, self.preprompts_holder, context_string) + + + def get_contenxt_string(self, feature, git_context:GitContext): + return f"""I am working on a feature but breaking it up into small incremental tasks. 
Your job is to complete the incremental task provided to you - only that task and nothign more. + +The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. + +## Feature - this is the description fo the current feature we are working on. +{feature.get_description()} + +## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. +{feature.get_progress()["done"]} + +## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. +### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. +{git_context.branch_changes} +### Staged Changes - this is the diff of the current staged changes. +{git_context.staged_changes} +""" diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json b/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json index e257633cdd..0fe5788006 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json +++ b/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json @@ -1,7 +1,4 @@ { - "todo": [ - "task 3" - ], "done": [ "task 1", "task 2" diff --git a/gpt_engineer/applications/interactive_cli/example_project/.gitignore b/gpt_engineer/applications/interactive_cli/example_project/.gitignore index 12ec290c57..8577ad31af 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.gitignore +++ b/gpt_engineer/applications/interactive_cli/example_project/.gitignore @@ -1,3 +1,4 @@ -#.ticket - track this folder for testing +#.feature - track this folder for testing -ignored_test \ No newline at end of file +.gpteng +ignored_test diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index 6fc3995565..7c75cf96ab 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -13,11 +13,13 @@ class Feature(DiskMemory): and progress (history of incremental work completed) """ def __init__(self, project_path: Union[str, Path]): - super().__init__(project_path / ".feature") + super().__init__(Path(project_path) / ".feature") + + def clear_feature(self) -> None: self.set_description("") self.set_task("") - super()["progress.json"] = {"done": []} + super().__setitem__("progress.json", json.dumps({"done": []})) def get_description(self) -> str: """ @@ -28,7 +30,7 @@ def get_description(self) -> str: str The content of the feature file. """ - return super()["description"] + return super().__getitem__("description") def set_description(self, feature_description: str): """ @@ -39,7 +41,7 @@ def set_description(self, feature_description: str): feature_description : str The new feature_description to write to the feature file. """ - super()["description"] = feature_description + super().__setitem__("description", feature_description) def get_progress(self) -> dict: """ @@ -50,7 +52,7 @@ def get_progress(self) -> dict: str The content of the feature file. 
""" - return json.load(super()["progress.json"]) + return json.loads(super().__getitem__("progress.json")) def update_progress(self, task: str): """ @@ -63,7 +65,7 @@ def update_progress(self, task: str): """ progress= self.get_progress() new_progress = progress['done'].append(task) - super()["progress.json"] = json.dumps(new_progress, indent=4) + super().__setitem__("progress.json", json.dumps(new_progress, indent=4)) def set_task(self, task: str): """ @@ -74,7 +76,7 @@ def set_task(self, task: str): task : str The new task to write to the feature file. """ - super()["task"] = task + super().__setitem__("task",task) def get_task(self) -> str: """ @@ -85,7 +87,7 @@ def get_task(self) -> str: str The content of the feature file. """ - return super()["task"] + return super().__getitem__("task") def complete_task(self): diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index eb58d2b5be..8e57e5e9e2 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -12,7 +12,7 @@ class FileSelection: """ def __init__(self, project_path: str, repository): self.repository = repository - self.yaml_path = os.path.join(project_path, '.ticket', 'files.yml') + self.yaml_path = os.path.join(project_path, '.feature', 'files.yml') self._initialize() def _create_nested_structure_from_file_paths(self, files_paths): @@ -184,6 +184,52 @@ def get_from_yaml(self): selected_files, excluded_files = self._get_from_yaml() return selected_files + + + def get_pretty_from_yaml(self): + """ + Retrieves selected file paths from the YAML file and prints them in an ASCII-style tree structure. + """ + # Get selected files from YAML + selected_files = self.get_from_yaml() + + # Helper function to insert a path into the tree dictionary + def insert_path(tree, path_parts): + # Recursively build nested dictionary from path parts + if not path_parts: + return + if path_parts[0] not in tree: + tree[path_parts[0]] = {} + insert_path(tree[path_parts[0]], path_parts[1:]) + + # Create a nested dictionary from the list of file paths + file_tree = {} + for filepath in selected_files: + parts = filepath.split('/') + insert_path(file_tree, parts) + + # Helper function to format the tree into a string with ASCII graphics + def format_tree(tree, prefix=''): + lines = [] + # Sorted to keep alphabetical order + items = sorted(tree.items()) + for i, (key, sub_tree) in enumerate(items): + if i == len(items) - 1: # Last item uses └── + lines.append(prefix + '└── ' + key) + extension = ' ' + else: + lines.append(prefix + '├── ' + key) + extension = '│ ' + if sub_tree: + lines.extend(format_tree(sub_tree, prefix=prefix + extension)) + return lines + + # Generate formatted tree lines + tree_lines = format_tree(file_tree) + + # Join lines and return as a string + return '\n'.join(tree_lines) + def open_yaml_in_editor(self): diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 389ecfc0db..8e18049224 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -1,6 +1,5 @@ import typer from dotenv import load_dotenv -from ticket import Ticket from prompt_toolkit import prompt from prompt_toolkit.validation import Validator, ValidationError diff --git a/gpt_engineer/core/default/disk_memory.py b/gpt_engineer/core/default/disk_memory.py index 62c7daf32b..e1936fc8c4 
100644 --- a/gpt_engineer/core/default/disk_memory.py +++ b/gpt_engineer/core/default/disk_memory.py @@ -171,6 +171,9 @@ def __setitem__(self, key: Union[str, Path], val: str) -> None: full_path.write_text(val, encoding="utf-8") + def set(self, key: Union[str, Path], val: str) -> None: + return self.__setitem__(key, val) + def __delitem__(self, key: Union[str, Path]) -> None: """ Delete a file or directory from the database corresponding to the given key. From 6470e30aa1c95cf9d0078f46f542a8fd97aa32ac Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 13 May 2024 12:54:25 +0100 Subject: [PATCH 11/36] linting pre commit --- .gitignore | 2 +- .../applications/cli/file_selector.py | 10 +- .../applications/interactive_cli/agent.py | 104 +++++++--------- .../cli-code_samples/edit_file_tree.py | 49 ++++---- .../cli-code_samples/edit_yaml_paths.py | 82 +++++++------ .../interactive_cli/cli-code_samples/test1.py | 14 ++- .../interactive_cli/cli-code_samples/test2.py | 19 +-- .../interactive_cli/cli-code_samples/test3.py | 20 +-- .../interactive_cli/cli-code_samples/test4.py | 17 +-- .../example_project/.feature/description | 2 +- .../example_project/.feature/files.yml | 2 +- .../example_project/.feature/progress.json | 2 +- .../example_project/.feature/task | 6 +- .../interactive_cli/example_project/README.md | 2 +- .../example_project/index.html | 2 +- .../example_project/styles.css | 2 +- .../applications/interactive_cli/feature.py | 24 ++-- .../interactive_cli/file_selection.py | 114 ++++++++++-------- .../applications/interactive_cli/files.py | 10 +- .../interactive_cli/generation_tools.py | 85 +++++++++---- .../applications/interactive_cli/main.py | 62 ++-------- .../interactive_cli/repository.py | 38 ++++-- gpt_engineer/core/default/steps.py | 4 +- gpt_engineer/core/prompt.py | 6 +- 24 files changed, 349 insertions(+), 329 deletions(-) diff --git a/.gitignore b/.gitignore index 94000e076a..5cbb23225b 100644 --- a/.gitignore +++ b/.gitignore @@ -93,4 +93,4 @@ webapp/.next/ gpt_engineer/benchmark/benchmarks/apps/dataset gpt_engineer/benchmark/benchmarks/mbpp/dataset -prompt \ No newline at end of file +prompt diff --git a/gpt_engineer/applications/cli/file_selector.py b/gpt_engineer/applications/cli/file_selector.py index 076672d5e1..17b8b10f34 100644 --- a/gpt_engineer/applications/cli/file_selector.py +++ b/gpt_engineer/applications/cli/file_selector.py @@ -19,8 +19,8 @@ import fnmatch import os +import platform import subprocess -import platform from pathlib import Path from typing import Any, Dict, Generator, List, Union @@ -212,12 +212,12 @@ def open_with_default_editor(self, file_path: Union[str, Path]): pass # Platform-specific methods to open the file - if platform.system() == 'Windows': + if platform.system() == "Windows": os.startfile(file_path) - elif platform.system() == 'Darwin': - subprocess.run(['open', file_path]) + elif platform.system() == "Darwin": + subprocess.run(["open", file_path]) else: # Linux and other Unix-like systems - subprocess.run(['xdg-open', file_path]) + subprocess.run(["xdg-open", file_path]) def is_utf8(self, file_path: Union[str, Path]) -> bool: """ diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index 8aeaafbf69..f7efd9d86d 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -1,32 +1,34 @@ +from feature import Feature +from file_selection import FileSelection +from files import Files +from generation_tools import 
build_context_string, generate_branch_name from prompt_toolkit import prompt as cli_input -from prompt_toolkit.validation import Validator, ValidationError +from prompt_toolkit.validation import ValidationError, Validator +from repository import Repository from gpt_engineer.core.ai import AI from gpt_engineer.core.base_agent import BaseAgent -from gpt_engineer.core.files_dict import FilesDict -from gpt_engineer.core.preprompts_holder import PrepromptsHolder -from gpt_engineer.core.prompt import Prompt from gpt_engineer.core.default.disk_memory import DiskMemory from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path from gpt_engineer.core.default.steps import improve_fn +from gpt_engineer.core.preprompts_holder import PrepromptsHolder +from gpt_engineer.core.prompt import Prompt -from feature import Feature -from repository import Repository, GitContext -from file_selection import FileSelection -from files import Files -from generation_tools import generate_branch_name - class FeatureValidator(Validator): def validate(self, document): text = document.text if not text: - raise ValidationError(message="Feature description cannot be empty", cursor_position=len(text)) + raise ValidationError( + message="Feature description cannot be empty", cursor_position=len(text) + ) + class FeatureAgent(BaseAgent): """ A cli agent which implements a feature as a set of incremental tasks """ + def __init__( self, project_path: str, @@ -40,32 +42,30 @@ def __init__( self.ai = ai or AI() self.file_selection = FileSelection(project_path, repository) - self.memory=DiskMemory(memory_path(project_path)) + self.memory = DiskMemory(memory_path(project_path)) self.preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) - def init(self): - self.feature.clear_feature() - + feature_description = cli_input( - "Write feature description: ", - multiline=True, - validator=FeatureValidator(), - bottom_toolbar="Press Ctrl+O to finish" - ) - + "Write feature description: ", + multiline=True, + validator=FeatureValidator(), + bottom_toolbar="Press Ctrl+O to finish", + ) + self.feature.set_description(feature_description) - + # print("\n Ticket files created at .ticket \n ") branch_name = generate_branch_name(self.ai, feature_description) - branch_name = cli_input('\nConfirm branch name: ', default=branch_name) + branch_name = cli_input("\nConfirm branch name: ", default=branch_name) - # todo: use gitpython to create new branch. - - print(f'\nFeature branch created.\n') + # todo: use gitpython to create new branch. + + print("\nFeature branch created.\n") self.file_selection.update_yaml_from_tracked_files() self.file_selection.open_yaml_in_editor() @@ -75,60 +75,42 @@ def init(self): def improve(self): self.resume() - - def resume(self): + def resume(self): git_context = self.repository.get_git_context() - if git_context.unstaged_changes: - if input("Unstaged changes present are you sure you want to proceed? y/n").lower() not in ["", "y", "yes"]: + if git_context.unstaged_changes: + if input( + "Unstaged changes present are you sure you want to proceed? 
y/n" + ).lower() not in ["", "y", "yes"]: print("Ok, not proceeding.") return - + self.file_selection.update_yaml_from_tracked_files() - context_string = self.get_contenxt_string(self.feature, git_context) - + context_string = build_context_string(self.feature, git_context) + files = Files(self.project_path, self.file_selection.get_from_yaml()) feature = self.feature.get_description() file_string = self.file_selection.get_pretty_from_yaml() task = self.feature.get_task() - - # list feature and task + # list feature, files and task print(f"Feature: {feature}\n\n") print(f"Files: {file_string}\n\n") print(f"Task: {task}\n\n") - # do you want to attempt this task? - if cli_input("Do you want to attempt this task? y/n: ", default='y').lower() not in ["y", "yes"]: + # do you want to attempt this task? + if cli_input( + "Do you want to attempt this task? y/n: ", default="y" + ).lower() not in ["y", "yes"]: print("Ok, not proceeding. Perhaps you should update the feature and retry") return # if no: do you want to edit feature? edit task? complete? or cancel? - prompt = Prompt(task) - - improve_fn(self.ai, prompt, files, self.memory, self.preprompts_holder, context_string) - - - def get_contenxt_string(self, feature, git_context:GitContext): - return f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothign more. - -The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. - -## Feature - this is the description fo the current feature we are working on. -{feature.get_description()} - -## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. -{feature.get_progress()["done"]} - -## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. -### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. -{git_context.branch_changes} - -### Staged Changes - this is the diff of the current staged changes. 
-{git_context.staged_changes} -""" - + prompt = Prompt(task, prefix="Task: ") + improve_fn( + self.ai, prompt, files, self.memory, self.preprompts_holder, context_string + ) diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py index 924660b8ad..6ae78694a4 100644 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py @@ -1,13 +1,12 @@ -from dataclasses import dataclass, field -from typing import Dict, Optional, List from prompt_toolkit import PromptSession from prompt_toolkit.application import Application from prompt_toolkit.key_binding import KeyBindings -from prompt_toolkit.widgets import TextArea -from prompt_toolkit.layout.layout import Layout +from prompt_toolkit.layout import Window from prompt_toolkit.layout.containers import HSplit from prompt_toolkit.layout.controls import FormattedTextControl -from prompt_toolkit.buffer import Buffer +from prompt_toolkit.layout.layout import Layout +from prompt_toolkit.widgets import TextArea + def generate_file_tree(files): """ @@ -15,13 +14,14 @@ def generate_file_tree(files): """ tree = {} for file in files: - parts = file.split('/') + parts = file.split("/") node = tree for part in parts: node = node.setdefault(part, {}) return tree -def generate_tree_string(node, prefix=''): + +def generate_tree_string(node, prefix=""): """ Recursively generates a string representation of the file tree. """ @@ -38,36 +38,38 @@ def generate_tree_string(node, prefix=''): lines.append(f"{prefix}{connector} {key}") return lines + def get_editable_tree(files): tree = generate_file_tree(files) tree_lines = generate_tree_string(tree) - return '\n'.join(tree_lines) + return "\n".join(tree_lines) -from prompt_toolkit.layout import Window def interactive_edit_files(files): - session = PromptSession() + PromptSession() # Generate editable file tree editable_tree = get_editable_tree(files) # Text area for file tree - text_area = TextArea(text=editable_tree, - scrollbar=True, - multiline=True, - wrap_lines=False) - + text_area = TextArea( + text=editable_tree, scrollbar=True, multiline=True, wrap_lines=False + ) + # Ensure the text area starts in insert mode # text_area.buffer.cursor_position += len(text_area.text) text_area.buffer.insert_mode = False # Instructions wrapped in a Window - instructions = Window(content=FormattedTextControl( - text='Please comment out unneeded files to reduce context overhead.\n' - 'You can comment out lines by adding "#" at the beginning of the line.\n' - 'Press Ctrl-S to save and exit.'), + instructions = Window( + content=FormattedTextControl( + text="Please comment out unneeded files to reduce context overhead.\n" + 'You can comment out lines by adding "#" at the beginning of the line.\n' + "Press Ctrl-S to save and exit." 
+ ), height=3, # Adjust height as necessary - style='class:instruction') + style="class:instruction", + ) # Container that holds both the instructions and the text area instruction_container = HSplit([instructions, text_area]) @@ -78,15 +80,16 @@ def interactive_edit_files(files): # Add key bindings for custom actions like save bindings = KeyBindings() - @bindings.add('c-s') + @bindings.add("c-s") def _(event): # Saving functionality or further processing can be implemented here - print('Saving and processing your tree...') + print("Saving and processing your tree...") event.app.exit() app = Application(layout=layout, key_bindings=bindings, full_screen=True) app.run() + # Example usage tracked_files = [ "project/src/main/java/com/example/MyApp.java", @@ -94,6 +97,6 @@ def _(event): "project/src/test/java/com/example/MyAppTest.java", "project/src/test/resources/testdata.txt", "project/lib/external-library.jar", - "project/README.md" + "project/README.md", ] interactive_edit_files(tracked_files) diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py index 048c58d6df..f614f41fd6 100644 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py @@ -1,11 +1,13 @@ -import yaml from pathlib import Path + +import yaml + from prompt_toolkit import Application from prompt_toolkit.key_binding import KeyBindings -from prompt_toolkit.widgets import TextArea from prompt_toolkit.layout import Layout -from prompt_toolkit.widgets import Label from prompt_toolkit.layout.containers import HSplit +from prompt_toolkit.widgets import Label, TextArea + def create_yaml_file(file_paths): """Generates a YAML structure from a list of file paths.""" @@ -17,51 +19,53 @@ def create_yaml_file(file_paths): current = current.setdefault(part, {}) return yaml.dump(root, sort_keys=False) + def edit_yaml(yaml_content): """Opens a Prompt Toolkit session to edit the YAML content.""" kb = KeyBindings() - @kb.add('c-q') - def exit_(event): - " Press Control-Q to exit. " - event.app.exit() - - @kb.add('c-c') - def exit_(event): - " Press Control-Q to exit. " - event.app.exit() - - @kb.add('c-s') - def save_exit(event): - " Press Control-S to save and exit. " - with open('edited_yaml.yaml', 'w') as f: - f.write(text_area.text) - print("File saved as 'edited_yaml.yaml'") - event.app.exit() - - @kb.add('c-t') - def comment_uncomment(event): - """Toggle comment on the current line with Ctrl-T.""" - tb = text_area.buffer - doc = tb.document - cursor_line_num = doc.cursor_position_row - line_text = doc.current_line_before_cursor + doc.current_line_after_cursor - if line_text.strip().startswith('#'): - tb.delete_before_cursor(len(line_text) - len(line_text.lstrip('#'))) - else: - tb.insert_text('#', move_cursor=False) + # @kb.add("c-q") + # def exit_(event): + # "Press Control-Q to exit." + # event.app.exit() + + # @kb.add("c-c") + # def exit_(event): + # "Press Control-Q to exit." + # event.app.exit() + + # @kb.add("c-s") + # def save_exit(event): + # "Press Control-S to save and exit." 
+ # with open("edited_yaml.yaml", "w") as f: + # f.write(text_area.text) + # print("File saved as 'edited_yaml.yaml'") + # event.app.exit() + + # @kb.add("c-t") + # def comment_uncomment(event): + # """Toggle comment on the current line with Ctrl-T.""" + # tb = text_area.buffer + # doc = tb.document + # line_text = doc.current_line_before_cursor + doc.current_line_after_cursor + # if line_text.strip().startswith("#"): + # tb.delete_before_cursor(len(line_text) - len(line_text.lstrip("#"))) + # else: + # tb.insert_text("#", move_cursor=False) text_area = TextArea( text=yaml_content, scrollbar=True, multiline=True, wrap_lines=False, - line_numbers=True + line_numbers=True, ) # Instruction label - instructions = Label(text="Use Ctrl-S to save and exit, Ctrl-Q to quit without saving, Ctrl-T to toggle comment.", - dont_extend_height=True) + instructions = Label( + text="Use Ctrl-S to save and exit, Ctrl-Q to quit without saving, Ctrl-T to toggle comment.", + dont_extend_height=True, + ) # Combine text area and instructions label in a vertical layout layout = Layout(HSplit([text_area, instructions])) @@ -69,17 +73,15 @@ def comment_uncomment(event): app = Application(layout=layout, key_bindings=kb, full_screen=False) app.run() + def main(file_paths): """Generate a YAML file from file paths and open it for editing.""" yaml_data = create_yaml_file(file_paths) edit_yaml(yaml_data) + # Example usage: -file_paths = [ - '/path/to/file1.txt', - '/path/to/file2.txt', - '/path/to/dir/file3.txt' -] +file_paths = ["/path/to/file1.txt", "/path/to/file2.txt", "/path/to/dir/file3.txt"] main(file_paths) diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py index 08c8bb8a7a..6b6e166261 100644 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py @@ -1,11 +1,15 @@ from prompt_toolkit import prompt -from prompt_toolkit.completion import WordCompleter + def main(): - branch_name_suggestion = 'feat/name' + branch_name_suggestion = "feat/name" print("Great, sounds like a useful feature.") - branch_name = prompt('Please confirm or edit the feature branch name: ', default=branch_name_suggestion) - print(f'Creating feature branch: {branch_name}') + branch_name = prompt( + "Please confirm or edit the feature branch name: ", + default=branch_name_suggestion, + ) + print(f"Creating feature branch: {branch_name}") + -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py index 2bab069145..a3024585f5 100644 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py @@ -1,27 +1,30 @@ from prompt_toolkit.shortcuts import radiolist_dialog + def main(): print("Use the arrow keys to navigate. 
Press Enter to select.") tasks = [ - ('0', 'Generate Whole Feature'), - ('1', 'Task A - Create a view file for account page'), - ('2', 'Task B - Make an API call to retrieve account information'), - ('3', 'Enter a custom task') + ("0", "Generate Whole Feature"), + ("1", "Task A - Create a view file for account page"), + ("2", "Task B - Make an API call to retrieve account information"), + ("3", "Enter a custom task"), ] result = radiolist_dialog( title="Suggested tasks", text="Select the task to start with, or enter a custom task:", - values=tasks + values=tasks, ).run() - if result == '3': + if result == "3": from prompt_toolkit import prompt - custom_task = prompt('Enter your custom task description: ') + + custom_task = prompt("Enter your custom task description: ") print(f"You entered a custom task: {custom_task}") else: task_description = next((desc for key, desc in tasks if key == result), None) print(f"You selected: {task_description}") -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py index 8396c1e17e..9ef776a307 100644 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py @@ -1,5 +1,6 @@ from prompt_toolkit.shortcuts import radiolist_dialog + def main(): print("Diff generated. Please Review, and stage the changes you want to keep.") @@ -8,25 +9,26 @@ def main(): title="Diff Review Options", text="Please select your action:", values=[ - ('r', 'Retry'), - ('s', 'Stage changes and continue'), - ('c', 'Commit changes and continue'), - ('u', 'Undo') - ] + ("r", "Retry"), + ("s", "Stage changes and continue"), + ("c", "Commit changes and continue"), + ("u", "Undo"), + ], ).run() # Handle the user's choice - if result == 'r': + if result == "r": print("You have chosen to retry the diff generation.") # Add logic to retry generating the diff - elif result == 's': + elif result == "s": print("You have chosen to stage the changes.") # Add logic to stage changes - elif result == 'c': + elif result == "c": print("You have chosen to commit the changes.") # Add logic to commit changes else: print("Operation cancelled.") -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py index a9c9a9732a..1dc23b0872 100644 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py +++ b/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py @@ -1,35 +1,36 @@ from prompt_toolkit import PromptSession from prompt_toolkit.completion import WordCompleter + def main(): print("Diff generated. 
Please Review, and stage the changes you want to keep.") # Define the options and create a completer with those options - options = {'r': 'Retry', 's': 'Stage changes and continue', 'c': 'Commit changes and continue', 'u': 'Undo'} - completer = WordCompleter(['r', 's', 'c', 'u'], ignore_case=True) + completer = WordCompleter(["r", "s", "c", "u"], ignore_case=True) session = PromptSession() # Using prompt to get user input result = session.prompt( "Please select your action \n r: Retry \n s: Stage \n c: Commit \n u: Undo \n\n", - completer=completer + completer=completer, ).lower() # Handle the user's choice - if result == 'r': + if result == "r": print("You have chosen to retry the diff generation.") # Add logic to retry generating the diff - elif result == 's': + elif result == "s": print("You have chosen to stage the changes.") # Add logic to stage changes - elif result == 'c': + elif result == "c": print("You have chosen to commit the changes.") # Add logic to commit changes - elif result == 'u': + elif result == "u": print("Undo the last operation.") # Add logic to undo the last operation else: print("Invalid option selected, please run the program again.") -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/description b/gpt_engineer/applications/interactive_cli/example_project/.feature/description index 7e4abfae75..0056c752b3 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/description +++ b/gpt_engineer/applications/interactive_cli/example_project/.feature/description @@ -1 +1 @@ -I want to create a feedback form on the website to collect user feedback \ No newline at end of file +I want to create a feedback form on the website to collect user feedback diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml b/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml index 9a3f1ddc74..1b3fd34e4c 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml +++ b/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml @@ -1,5 +1,5 @@ # Complete list of files shared with the AI -# Please comment out any files not needed as context for this change +# Please comment out any files not needed as context for this change # This saves money and avoids overwhelming the AI - .gitignore #- 'README.md' diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json b/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json index 0fe5788006..6238f820a0 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json +++ b/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json @@ -3,4 +3,4 @@ "task 1", "task 2" ] -} \ No newline at end of file +} diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/task b/gpt_engineer/applications/interactive_cli/example_project/.feature/task index f75456aaa6..3dbc48c050 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/task +++ b/gpt_engineer/applications/interactive_cli/example_project/.feature/task @@ -1,5 +1,5 @@ -Create a local sql lite database for development. +Create a local sql lite database for development. -Initialize it with a feedback table. +Initialize it with a feedback table. 
-The table can store a comment and a rating out of 5 \ No newline at end of file +The table can store a comment and a rating out of 5 diff --git a/gpt_engineer/applications/interactive_cli/example_project/README.md b/gpt_engineer/applications/interactive_cli/example_project/README.md index b55fe812a9..8f6e5c23cd 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/README.md +++ b/gpt_engineer/applications/interactive_cli/example_project/README.md @@ -16,4 +16,4 @@ This project is the codebase for a static website for a Local Bakery. It's desig To get a local copy up and running, follow these simple steps: ### Prerequisites -- Any modern web browser (e.g., Chrome, Firefox, Safari, or Edge). \ No newline at end of file +- Any modern web browser (e.g., Chrome, Firefox, Safari, or Edge). diff --git a/gpt_engineer/applications/interactive_cli/example_project/index.html b/gpt_engineer/applications/interactive_cli/example_project/index.html index ef3a354dce..1dcaa1d780 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/index.html +++ b/gpt_engineer/applications/interactive_cli/example_project/index.html @@ -34,7 +34,7 @@

Contact Us

Address: 123 Baking St, Foodtown, TX

Email: contact@sweettreatsbakery.com

- +

Thank you for visiting our website! Follow us on social media for updates.

diff --git a/gpt_engineer/applications/interactive_cli/example_project/styles.css b/gpt_engineer/applications/interactive_cli/example_project/styles.css index ab9b9c97bc..34594dbb21 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/styles.css +++ b/gpt_engineer/applications/interactive_cli/example_project/styles.css @@ -6,7 +6,7 @@ body { background: #f4f4f4; color: #333; display: flex; - flex-direction: column; + flex-direction: column; } header { diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index 7c75cf96ab..58fbf9fe4d 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -1,20 +1,22 @@ import json + from pathlib import Path from typing import Union from gpt_engineer.core.default.disk_memory import DiskMemory + class Feature(DiskMemory): """ - Represents a ticket which will be developed incrementally, - + Represents a ticket which will be developed incrementally, + Includes with a feature (overal description of the change), - a task (current incremental work item), + a task (current incremental work item), and progress (history of incremental work completed) """ - def __init__(self, project_path: Union[str, Path]): - super().__init__(Path(project_path) / ".feature") + def __init__(self, project_path: Union[str, Path]): + super().__init__(Path(project_path) / ".feature") def clear_feature(self) -> None: self.set_description("") @@ -31,7 +33,7 @@ def get_description(self) -> str: The content of the feature file. """ return super().__getitem__("description") - + def set_description(self, feature_description: str): """ Updates the feature file with new text. @@ -63,10 +65,10 @@ def update_progress(self, task: str): feature_description : str The new feature_description to write to the feature file. """ - progress= self.get_progress() - new_progress = progress['done'].append(task) + progress = self.get_progress() + new_progress = progress["done"].append(task) super().__setitem__("progress.json", json.dumps(new_progress, indent=4)) - + def set_task(self, task: str): """ Updates the task file with new text. @@ -76,7 +78,7 @@ def set_task(self, task: str): task : str The new task to write to the feature file. """ - super().__setitem__("task",task) + super().__setitem__("task", task) def get_task(self) -> str: """ @@ -89,7 +91,6 @@ def get_task(self) -> str: """ return super().__getitem__("task") - def complete_task(self): """ Moves the current task to the 'done' list in the progress.json file and clears the task file. @@ -99,4 +100,3 @@ def complete_task(self): if task: self.update_progress(task) self.set_task("") - diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index 8e57e5e9e2..6802c41d40 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -1,32 +1,34 @@ import os -import subprocess -import yaml import platform -import yaml +import subprocess + from typing import List, Tuple +import yaml + class FileSelection: """ Manages the active files in a project directory and creates a YAML file listing them. 
""" + def __init__(self, project_path: str, repository): self.repository = repository - self.yaml_path = os.path.join(project_path, '.feature', 'files.yml') + self.yaml_path = os.path.join(project_path, ".feature", "files.yml") self._initialize() def _create_nested_structure_from_file_paths(self, files_paths): files_paths.sort() file_structure = [] for filepath in files_paths: - parts = filepath.split('/') + parts = filepath.split("/") # Filter out the '.ticket' directory from paths - if '.ticket' in parts or '.feature' in parts: + if ".ticket" in parts or ".feature" in parts: continue node = file_structure for i, part in enumerate(parts[:-1]): # Append '/' to part if it's a directory - directory = part if part.endswith('/') else part + '/' + directory = part if part.endswith("/") else part + "/" found = False for item in node: if isinstance(item, dict) and directory in item: @@ -36,22 +38,26 @@ def _create_nested_structure_from_file_paths(self, files_paths): if not found: new_node = [] # Insert directory at the correct position (before any file) - index = next((idx for idx, item in enumerate(node) if isinstance(item, str)), len(node)) + index = next( + (idx for idx, item in enumerate(node) if isinstance(item, str)), + len(node), + ) node.insert(index, {directory: new_node}) node = new_node # Add the file to the last node, ensuring directories are listed first - if not parts[-1].endswith('/'): + if not parts[-1].endswith("/"): node.append(parts[-1]) return file_structure - def _write_yaml_with_comments(self, yaml_content): - with open(self.yaml_path, 'w') as file: - file.write(f"""# Complete list of files shared with the AI -# Please comment out any files not needed as context for this change + with open(self.yaml_path, "w") as file: + file.write( + f"""# Complete list of files shared with the AI +# Please comment out any files not needed as context for this change # This saves money and avoids overwhelming the AI -{yaml_content}""") +{yaml_content}""" + ) def _initialize(self): """ @@ -60,25 +66,28 @@ def _initialize(self): if os.path.exists(self.yaml_path): return - + print("YAML file is missing or empty, generating YAML...") - file_structure = self._create_nested_structure_from_file_paths(self.repository.get_tracked_files()) - + file_structure = self._create_nested_structure_from_file_paths( + self.repository.get_tracked_files() + ) + self._write_yaml_with_comments( - yaml.safe_dump(file_structure, default_flow_style=False, sort_keys=False, indent=2) + yaml.safe_dump( + file_structure, default_flow_style=False, sort_keys=False, indent=2 + ) ) - def _get_from_yaml(self) -> Tuple[List[str], List[str]]: - with open(self.yaml_path, 'r') as file: - original_content = file.readlines()[3:] # Skip the 3 instruction lines + with open(self.yaml_path, "r") as file: + original_content = file.readlines()[3:] # Skip the 3 instruction lines # Create a version of the content with all lines uncommented - uncommented_content = ''.join(line.lstrip('# ') for line in original_content) + uncommented_content = "".join(line.lstrip("# ") for line in original_content) # Load the original and uncommented content as YAML - original_structure = yaml.safe_load(''.join(original_content)) + original_structure = yaml.safe_load("".join(original_content)) uncommented_structure = yaml.safe_load(uncommented_content) def recurse_items(items, path=""): @@ -104,19 +113,22 @@ def recurse_items(items, path=""): excluded_files = list(set(uncommented_paths) - set(original_paths)) return (original_paths, excluded_files) - - def 
_set_to_yaml(self, selected_files, excluded_files): + def _set_to_yaml(self, selected_files, excluded_files): # Dont worry about commenting lines if they are no excluded files if not excluded_files: - file_structure = self._create_nested_structure_from_file_paths(selected_files) - + file_structure = self._create_nested_structure_from_file_paths( + selected_files + ) + self._write_yaml_with_comments( - yaml.safe_dump(file_structure, default_flow_style=False, sort_keys=False, indent=2) + yaml.safe_dump( + file_structure, default_flow_style=False, sort_keys=False, indent=2 + ) ) return - + all_files = list(selected_files) + list(excluded_files) current_structure = self._create_nested_structure_from_file_paths(all_files) @@ -136,24 +148,25 @@ def mark_excluded_files(structure, prefix=""): # Find all files marked for commenting - add comment and remove the mark. def comment_marked_files(yaml_content): - lines = yaml_content.split('\n') + lines = yaml_content.split("\n") updated_lines = [] for line in lines: - if '#' in line: - line = '#' + line.replace('#', '').strip() + if "#" in line: + line = "#" + line.replace("#", "").strip() updated_lines.append(line) - - return '\n'.join(updated_lines) - content = yaml.safe_dump(current_structure, default_flow_style=False, sort_keys=False, indent=2) + return "\n".join(updated_lines) + + content = yaml.safe_dump( + current_structure, default_flow_style=False, sort_keys=False, indent=2 + ) updated_content = comment_marked_files(content) self._write_yaml_with_comments(updated_content) return - def update_yaml_from_tracked_files(self): """ @@ -169,7 +182,7 @@ def update_yaml_from_tracked_files(self): # If there are no changes, do nothing if set(tracked_files) == set(selected_files + excluded_files): - print('yep') + print("yep") return new_selected_files = list(set(tracked_files) - set(excluded_files)) @@ -184,7 +197,6 @@ def get_from_yaml(self): selected_files, excluded_files = self._get_from_yaml() return selected_files - def get_pretty_from_yaml(self): """ @@ -192,7 +204,7 @@ def get_pretty_from_yaml(self): """ # Get selected files from YAML selected_files = self.get_from_yaml() - + # Helper function to insert a path into the tree dictionary def insert_path(tree, path_parts): # Recursively build nested dictionary from path parts @@ -205,21 +217,21 @@ def insert_path(tree, path_parts): # Create a nested dictionary from the list of file paths file_tree = {} for filepath in selected_files: - parts = filepath.split('/') + parts = filepath.split("/") insert_path(file_tree, parts) # Helper function to format the tree into a string with ASCII graphics - def format_tree(tree, prefix=''): + def format_tree(tree, prefix=""): lines = [] # Sorted to keep alphabetical order items = sorted(tree.items()) for i, (key, sub_tree) in enumerate(items): if i == len(items) - 1: # Last item uses └── - lines.append(prefix + '└── ' + key) - extension = ' ' + lines.append(prefix + "└── " + key) + extension = " " else: - lines.append(prefix + '├── ' + key) - extension = '│ ' + lines.append(prefix + "├── " + key) + extension = "│ " if sub_tree: lines.extend(format_tree(sub_tree, prefix=prefix + extension)) return lines @@ -228,9 +240,7 @@ def format_tree(tree, prefix=''): tree_lines = format_tree(file_tree) # Join lines and return as a string - return '\n'.join(tree_lines) - - + return "\n".join(tree_lines) def open_yaml_in_editor(self): """ @@ -239,9 +249,9 @@ def open_yaml_in_editor(self): """ # Platform-specific methods to open the file - if platform.system() == 'Windows': + if 
platform.system() == "Windows": os.startfile(self.yaml_path) - elif platform.system() == 'Darwin': - subprocess.run(['open', self.yaml_path]) + elif platform.system() == "Darwin": + subprocess.run(["open", self.yaml_path]) else: # Linux and other Unix-like systems - subprocess.run(['xdg-open', self.yaml_path]) + subprocess.run(["xdg-open", self.yaml_path]) diff --git a/gpt_engineer/applications/interactive_cli/files.py b/gpt_engineer/applications/interactive_cli/files.py index ee1e051986..57e3a8a080 100644 --- a/gpt_engineer/applications/interactive_cli/files.py +++ b/gpt_engineer/applications/interactive_cli/files.py @@ -1,6 +1,8 @@ -from gpt_engineer.core.files_dict import FilesDict from pathlib import Path +from gpt_engineer.core.files_dict import FilesDict + + class Files(FilesDict): def __init__(self, project_path: str, selected_files: list): """ @@ -16,10 +18,12 @@ def __init__(self, project_path: str, selected_files: list): content_dict = {} for file_path in selected_files: try: - with open(Path(project_path) / file_path, "r", encoding="utf-8") as content: + with open( + Path(project_path) / file_path, "r", encoding="utf-8" + ) as content: content_dict[str(file_path)] = content.read() except FileNotFoundError: print(f"Warning: File not found {file_path}") except UnicodeDecodeError: print(f"Warning: File not UTF-8 encoded {file_path}, skipping") - super().__init__(content_dict) \ No newline at end of file + super().__init__(content_dict) diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index d55ae366fe..ad3487840b 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -1,25 +1,29 @@ -from gpt_engineer.core.ai import AI -from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler import xml.etree.ElementTree as ET +from feature import Feature +from files import Files +from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler +from repository import GitContext + +from gpt_engineer.core.ai import AI def generate_branch_name(ai: AI, feature_description: str) -> str: system_prompt = """ You are a branch name autocomplete / suggestion tool. Based on the users input, please respond with a single suggestion of a branch name and notthing else. 
-    Example: +    Example: Input: I want to add a login button -    Output: feature/login-button +    Output: feature/login-button """ -    -    ai.llm.callbacks.clear() # silent -    -    messages = ai.start(system_prompt, feature_description,step_name="name-branch") +    +    ai.llm.callbacks.clear()  # silent +    +    messages = ai.start(system_prompt, feature_description, step_name="name-branch") ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) -    +    return messages[-1].content.strip() @@ -36,32 +40,62 @@ def __str__(self): def parse_task_xml_to_class(xml_data): # Parse the XML data root = ET.fromstring(xml_data) -    + # Extract the planning thoughts -    planning_thoughts = root.find('PlanningThoughts').text.strip() +    planning_thoughts = root.find("PlanningThoughts").text.strip() # Extract tasks -    tasks = [task.text.strip() for task in root.findall('.//Task')] -    + tasks = [task.text.strip() for task in root.findall(".//Task")] + # Extract closing remarks -    closing_remarks = root.find('ClosingRemarks').text.strip() -    + closing_remarks = root.find("ClosingRemarks").text.strip() + # Create an instance of the response class response = TaskResponse(planning_thoughts, tasks, closing_remarks) -    + return response -def generate_suggested_tasks(ai: AI, input: str) -> str: +def build_context_string(feature: Feature, git_context: GitContext): +    return f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothing more. + +The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. + +## Feature - this is the description of the current feature we are working on. +{feature.get_description()} + +## Completed Tasks - this is the list of tasks you have completed so far on the feature branch. +{feature.get_progress()["done"]} + +## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. +### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. +{git_context.branch_changes} + +### Staged Changes - this is the diff of the current staged changes. +{git_context.staged_changes} +""" + + +def build_files_context_string(feature, git_context, files: Files): +    return f"""{build_context_string(feature, git_context)} + +## Current Codebase - this is the as-is view of the current code base including any unstaged changes. +{files.to_chat()} +""" + + +def generate_suggested_tasks( +    ai: AI, feature: Feature, git_context: GitContext, files: Files +) -> str: system_prompt = """ You are a software engineer work planning tool. Given a feature description, a list of tasks already completed, and sections of the code repository we are working on, suggest a list of tasks to be done in order to move towards the end goal of completing the feature. -First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what is left to be done. +First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what is left to be done. -Then output the list of tasks to be done. Please try to keep the tasks small, actionable and independantly commitable. +Then output the list of tasks to be done. 
Please try to keep the tasks small, actionable and independantly commitable. -The output format will be XML as follows: +The output format will be XML as follows: @@ -82,14 +116,15 @@ def generate_suggested_tasks(ai: AI, input: str) -> str: Respond in XML and nothing else. """ - + + input = build_files_context_string(feature, git_context, files) + # ai.llm.callbacks.clear() # silent - - messages = ai.start(system_prompt, input,step_name="suggest-tasks") + + messages = ai.start(system_prompt, input, step_name="suggest-tasks") # ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) - + xml = messages[-1].content.strip() return parse_task_xml_to_class(xml).tasks - diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 8e18049224..073413311c 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -1,64 +1,20 @@ import typer -from dotenv import load_dotenv - -from prompt_toolkit import prompt -from prompt_toolkit.validation import Validator, ValidationError - -from gpt_engineer.core.ai import AI -from gpt_engineer.core.prompt import Prompt -from generation_tools import generate_suggested_tasks -from repository import Repository, GitContext -from file_selection import FileSelection -from files import Files from agent import FeatureAgent +from dotenv import load_dotenv from feature import Feature +from repository import Repository -app = typer.Typer() - - -def get_contenxt_string(feature:Feature ,git_context:GitContext): - return f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothign more. - -The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. - -## Feature - this is the description fo the current feature we are working on. -{feature.get_description()} - -## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. -{feature.get_progress()["done"]} - -## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. -### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. -{git_context.branch_changes} - -### Staged Changes - this is the diff of the current staged changes. -{git_context.staged_changes} -""" - -def get_full_context_string(ticket, git_context, files: Files): - return f"""{get_contenxt_string(ticket, git_context)} - -## Current Codebase - this is the as is view of the current code base including any unstaged changes. -{files.to_chat()} -""" - - -def choose_next_task(ai, ticket, git_context: GitContext, files: Files): - print(f"There are {len(ticket.progress.done)} tasks completed so far. What shall we do next?") - - context_string = get_full_context_string(ticket, git_context, files) +from gpt_engineer.core.ai import AI - suggested_tasks_xml = generate_suggested_tasks() +app = typer.Typer() @app.command() def main( project_path: str = typer.Argument(".", help="path"), model: str = typer.Argument("gpt-4-turbo", help="model id string"), - new: bool = typer.Option( - False, "--new", "-n", help="Initialize new feature." 
- ), + new: bool = typer.Option(False, "--new", "-n", help="Initialize new feature."), temperature: float = typer.Option( 0.1, "--temperature", @@ -77,10 +33,10 @@ def main( ), debug: bool = typer.Option( False, "--debug", "-d", help="Enable debug mode for debugging." - ) + ), ): """ - Run GPTE Interactive Improve + Run GPTE Interactive Improve """ load_dotenv() @@ -105,6 +61,6 @@ def main( else: agent.resume() - + if __name__ == "__main__": - app() \ No newline at end of file + app() diff --git a/gpt_engineer/applications/interactive_cli/repository.py b/gpt_engineer/applications/interactive_cli/repository.py index 81d50e00c2..fe7dd852df 100644 --- a/gpt_engineer/applications/interactive_cli/repository.py +++ b/gpt_engineer/applications/interactive_cli/repository.py @@ -1,14 +1,15 @@ from dataclasses import dataclass from typing import List -from git import Repo, GitCommandError -import os +from git import GitCommandError, Repo + @dataclass class Commit: """ Represents a single Git commit with a description and a diff. """ + description: str diff: str @@ -22,6 +23,7 @@ class GitContext: """ Represents the Git context of an in progress feature. """ + commits: List[Commit] branch_changes: str staged_changes: str @@ -34,6 +36,7 @@ class Repository: Manages a git repository, providing functionalities to get repo status, list files considering .gitignore, and interact with repository history. """ + def __init__(self, repo_path: str): self.repo_path = repo_path self.repo = Repo(repo_path) @@ -44,16 +47,16 @@ def get_tracked_files(self) -> List[str]: List all files that are currently tracked by Git in the repository. """ try: - tracked_files = self.repo.git.ls_files().split('\n') + tracked_files = self.repo.git.ls_files().split("\n") return tracked_files except GitCommandError as e: print(f"Error listing tracked files: {e}") return [] - + def get_feature_branch_diff(self): """ Get a consolidated diff for the entire feature branch from its divergence point. - + Returns: - str: The diff representing all changes from the feature branch since its divergence. 
""" @@ -69,18 +72,21 @@ def get_feature_branch_diff(self): # Find the merge base between the current branch and the tracking branch or master merge_base = self.repo.merge_base(tracking_branch, current_branch) if merge_base: - merge_base = merge_base[0] # GitPython might return a list of merge bases + merge_base = merge_base[ + 0 + ] # GitPython might return a list of merge bases # Generate the diff from the merge base to the latest commit of the feature branch - feature_diff = self.repo.git.diff(f"{merge_base}..{current_branch}", unified=0) + feature_diff = self.repo.git.diff( + f"{merge_base}..{current_branch}", unified=0 + ) return feature_diff except GitCommandError as e: print(f"Error generating diff: {e}") return "" - def get_git_context(self): - staged_changes = self.repo.git.diff('--cached') + staged_changes = self.repo.git.diff("--cached") unstaged_changes = self.repo.git.diff() current_branch = self.repo.active_branch @@ -89,7 +95,11 @@ def get_git_context(self): commit_objects = [ Commit( commit.summary, - commit.diff(commit.parents[0], create_patch=True) if commit.parents else commit.diff(None, create_patch=True) + ( + commit.diff(commit.parents[0], create_patch=True) + if commit.parents + else commit.diff(None, create_patch=True) + ), ) for commit in commits ] @@ -98,4 +108,10 @@ def get_git_context(self): tracked_files = self.get_tracked_files() - return GitContext(commit_objects, branch_changes, staged_changes, unstaged_changes, tracked_files) \ No newline at end of file + return GitContext( + commit_objects, + branch_changes, + staged_changes, + unstaged_changes, + tracked_files, + ) diff --git a/gpt_engineer/core/default/steps.py b/gpt_engineer/core/default/steps.py index 6051358b24..8ff8f783ed 100644 --- a/gpt_engineer/core/default/steps.py +++ b/gpt_engineer/core/default/steps.py @@ -37,7 +37,7 @@ import traceback from pathlib import Path -from typing import List, MutableMapping, Union, Optional +from typing import List, MutableMapping, Optional, Union from langchain.schema import HumanMessage, SystemMessage from termcolor import colored @@ -303,7 +303,7 @@ def improve_fn( messages = [ SystemMessage(content=setup_sys_prompt_existing_code(preprompts)), ] - + if additional_context: messages.append(HumanMessage(content=additional_context)) diff --git a/gpt_engineer/core/prompt.py b/gpt_engineer/core/prompt.py index 4d8286343c..85e2e10d75 100644 --- a/gpt_engineer/core/prompt.py +++ b/gpt_engineer/core/prompt.py @@ -9,16 +9,18 @@ def __init__( text: str, image_urls: Optional[Dict[str, str]] = None, entrypoint_prompt: str = "", + prefix: Optional[str] = "Request: ", ): self.text = text self.image_urls = image_urls self.entrypoint_prompt = entrypoint_prompt + self.prefix = prefix def __repr__(self): return f"Prompt(text={self.text!r}, image_urls={self.image_urls!r})" - def to_langchain_content(self): - content = [{"type": "text", "text": f"Request: {self.text}"}] + def to_langchain_content(self) -> Dict[str, str]: + content = [{"type": "text", "text": f"{self.prefix}{self.text}"}] if self.image_urls: for name, url in self.image_urls.items(): From f43e288332b3a5569634211f5e553cf4765eb91b Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 13 May 2024 19:02:06 +0100 Subject: [PATCH 12/36] wip --- .../applications/interactive_cli/agent.py | 87 +++---------- .../interactive_cli/agent_steps.py | 114 ++++++++++++++++++ gpt_engineer/core/default/steps.py | 1 - 3 files changed, 131 insertions(+), 71 deletions(-) create mode 100644 
gpt_engineer/applications/interactive_cli/agent_steps.py diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index f7efd9d86d..afa40d6be1 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -1,29 +1,23 @@ from feature import Feature from file_selection import FileSelection -from files import Files -from generation_tools import build_context_string, generate_branch_name -from prompt_toolkit import prompt as cli_input -from prompt_toolkit.validation import ValidationError, Validator from repository import Repository +from files import Files +from agent_steps import ( + initialize_new_feature, + update_user_file_selection, + check_for_unstaged_changes, + confirm_task_and_context_with_user, +) +from generation_tools import build_context_string from gpt_engineer.core.ai import AI from gpt_engineer.core.base_agent import BaseAgent from gpt_engineer.core.default.disk_memory import DiskMemory from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path -from gpt_engineer.core.default.steps import improve_fn from gpt_engineer.core.preprompts_holder import PrepromptsHolder from gpt_engineer.core.prompt import Prompt -class FeatureValidator(Validator): - def validate(self, document): - text = document.text - if not text: - raise ValidationError( - message="Feature description cannot be empty", cursor_position=len(text) - ) - - class FeatureAgent(BaseAgent): """ A cli agent which implements a feature as a set of incremental tasks @@ -46,71 +40,24 @@ def __init__( self.preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) def init(self): - self.feature.clear_feature() - - feature_description = cli_input( - "Write feature description: ", - multiline=True, - validator=FeatureValidator(), - bottom_toolbar="Press Ctrl+O to finish", - ) - - self.feature.set_description(feature_description) - - # print("\n Ticket files created at .ticket \n ") - - branch_name = generate_branch_name(self.ai, feature_description) - - branch_name = cli_input("\nConfirm branch name: ", default=branch_name) - - # todo: use gitpython to create new branch. - print("\nFeature branch created.\n") + initialize_new_feature(self.feature) - self.file_selection.update_yaml_from_tracked_files() - self.file_selection.open_yaml_in_editor() - input("Please edit the YAML file and then press Enter to continue...") + update_user_file_selection(self.file_selection) self.resume() - def improve(self): - self.resume() - def resume(self): - git_context = self.repository.get_git_context() - if git_context.unstaged_changes: - if input( - "Unstaged changes present are you sure you want to proceed? y/n" - ).lower() not in ["", "y", "yes"]: - print("Ok, not proceeding.") - return + check_for_unstaged_changes(self.repository) - self.file_selection.update_yaml_from_tracked_files() + confirm_task_and_context_with_user(self.feature, self.file_selection) - context_string = build_context_string(self.feature, git_context) + context_string = build_context_string( + self.feature, self.repository.get_git_context() + ) files = Files(self.project_path, self.file_selection.get_from_yaml()) - feature = self.feature.get_description() - file_string = self.file_selection.get_pretty_from_yaml() - task = self.feature.get_task() - - # list feature, files and task - print(f"Feature: {feature}\n\n") - print(f"Files: {file_string}\n\n") - print(f"Task: {task}\n\n") - - # do you want to attempt this task? 
- if cli_input( - "Do you want to attempt this task? y/n: ", default="y" - ).lower() not in ["y", "yes"]: - print("Ok, not proceeding. Perhaps you should update the feature and retry") - return - # if no: do you want to edit feature? edit task? complete? or cancel? - - prompt = Prompt(task, prefix="Task: ") - - improve_fn( - self.ai, prompt, files, self.memory, self.preprompts_holder, context_string - ) + def improve(self): + self.resume() diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py new file mode 100644 index 0000000000..242bebbe24 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -0,0 +1,114 @@ +from feature import Feature +from file_selection import FileSelection +from repository import Repository +from files import Files +from generation_tools import generate_branch_name + + +from gpt_engineer.core.ai import AI +from gpt_engineer.core.prompt import Prompt +from gpt_engineer.core.default.steps import improve_fn + +from prompt_toolkit import prompt as cli_input +from prompt_toolkit.validation import ValidationError, Validator + + +class FeatureValidator(Validator): + def validate(self, document): + text = document.text + if not text: + raise ValidationError( + message="Feature description cannot be empty", cursor_position=len(text) + ) + + +def initialize_new_feature(ai: AI, feature: Feature): + feature.clear_feature() + + feature_description = cli_input( + "Write feature description: ", + multiline=True, + validator=FeatureValidator(), + bottom_toolbar="Press Ctrl+O to finish", + ) + + feature.set_description(feature_description) + + # print("\n Ticket files created at .ticket \n ") + + branch_name = generate_branch_name(ai, feature_description) + + branch_name = cli_input("\nConfirm branch name: ", default=branch_name) + + # todo: use gitpython to create new branch. + + print("\nFeature branch created.\n") + + +def update_user_file_selection(file_selection: FileSelection): + file_selection.update_yaml_from_tracked_files() + file_selection.open_yaml_in_editor() + input("Please edit the YAML file and then press Enter to continue...") + + +def check_for_unstaged_changes( + repository: Repository, +): + git_context = repository.get_git_context() + + if git_context.unstaged_changes: + if input( + "Unstaged changes present are you sure you want to proceed? y/n" + ).lower() not in ["", "y", "yes"]: + print("Ok, not proceeding.") + return + + +def confirm_task_and_context_with_user(feature: Feature, file_selection: FileSelection): + file_selection.update_yaml_from_tracked_files() + + feature_description = feature.get_description() + file_string = file_selection.get_pretty_from_yaml() + task = feature.get_task() + + # list feature, files and task + print(f"Feature: {feature_description}\n\n") + print(f"Files: {file_string}\n\n") + print(f"Task: {task}\n\n") + + # do you want to attempt this task? + if cli_input("Do you want to attempt this task? y/n: ").lower() not in [ + "y", + "yes", + ]: + print("Ok, not proceeding. Perhaps you should update the feature and retry") + return + # TODO: if no: do you want to edit feature? edit task? complete? or cancel? + + +def run_improve_function(ai: AI, feature: Feature, files: Files): + + prompt = Prompt(feature.get_task(), prefix="Task: ") + + # WIP! 
+ + +# improve_fn( +# self.ai, prompt, files, self.memory, self.preprompts_holder, context_string +# ) + +# files_dict_before = FileSelector(project_path).ask_for_files() +# files_dict = handle_improve_mode(prompt, agent, memory, files_dict_before) +# if not files_dict or files_dict_before == files_dict: +# print( +# f"No changes applied. Could you please upload the debug_log_file.txt in {memory.path} folder in a github issue?" +# ) + +# else: +# print("\nChanges to be made:") +# compare(files_dict_before, files_dict) + +# print() +# print(colored("Do you want to apply these changes?", "light_green")) +# if not prompt_yesno(): +# files_dict = files_dict_before diff --git a/gpt_engineer/core/default/steps.py b/gpt_engineer/core/default/steps.py index 8ff8f783ed..510c065af3 100644 --- a/gpt_engineer/core/default/steps.py +++ b/gpt_engineer/core/default/steps.py @@ -393,7 +393,6 @@ def handle_improve_mode(prompt, agent, memory, files_dict): # Get the captured output captured_string = captured_output.getvalue() - print(captured_string) memory.log(DEBUG_LOG_FILE, "\nCONSOLE OUTPUT:\n" + captured_string) return files_dict From 689a0c254cc0c68624fe9779ec6d5b2b012c8fde Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 09:22:02 +0100 Subject: [PATCH 13/36] works with a task given --- gpt_engineer/applications/cli/main.py | 5 +- .../applications/interactive_cli/agent.py | 23 ++-- .../interactive_cli/agent_steps.py | 102 ++++++++++++++---- .../applications/interactive_cli/files.py | 11 ++ gpt_engineer/core/default/steps.py | 4 +- 5 files changed, 108 insertions(+), 37 deletions(-) diff --git a/gpt_engineer/applications/cli/main.py b/gpt_engineer/applications/cli/main.py index 6cbd73d5ac..a4c031689d 100644 --- a/gpt_engineer/applications/cli/main.py +++ b/gpt_engineer/applications/cli/main.py @@ -454,7 +454,10 @@ def main( if not no_execution: if improve_mode: files_dict_before = FileSelector(project_path).ask_for_files() - files_dict = handle_improve_mode(prompt, agent, memory, files_dict_before) + + improve_lambda = lambda: agent.improve(prompt, files_dict_before) + files_dict = handle_improve_mode(improve_lambda, memory) + if not files_dict or files_dict_before == files_dict: print( f"No changes applied. Could you please upload the debug_log_file.txt in {memory.path} folder in a github issue?" 
diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index afa40d6be1..150b58ade3 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -6,16 +6,13 @@ initialize_new_feature, update_user_file_selection, check_for_unstaged_changes, - confirm_task_and_context_with_user, + confirm_feature_context_and_task_with_user, + run_improve_function, ) from generation_tools import build_context_string from gpt_engineer.core.ai import AI from gpt_engineer.core.base_agent import BaseAgent -from gpt_engineer.core.default.disk_memory import DiskMemory -from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path -from gpt_engineer.core.preprompts_holder import PrepromptsHolder -from gpt_engineer.core.prompt import Prompt class FeatureAgent(BaseAgent): @@ -36,12 +33,10 @@ def __init__( self.ai = ai or AI() self.file_selection = FileSelection(project_path, repository) - self.memory = DiskMemory(memory_path(project_path)) - self.preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) def init(self): - initialize_new_feature(self.feature) + initialize_new_feature(self.ai, self.feature) update_user_file_selection(self.file_selection) @@ -51,13 +46,15 @@ def resume(self): check_for_unstaged_changes(self.repository) - confirm_task_and_context_with_user(self.feature, self.file_selection) + confirm_feature_context_and_task_with_user(self.feature, self.file_selection) - context_string = build_context_string( - self.feature, self.repository.get_git_context() + run_improve_function( + self.project_path, + self.feature, + self.repository, + self.ai, + self.file_selection, ) - files = Files(self.project_path, self.file_selection.get_from_yaml()) - def improve(self): self.resume() diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index 242bebbe24..0d7b65df1a 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -2,15 +2,22 @@ from file_selection import FileSelection from repository import Repository from files import Files -from generation_tools import generate_branch_name +from generation_tools import generate_branch_name, build_context_string +from termcolor import colored from gpt_engineer.core.ai import AI from gpt_engineer.core.prompt import Prompt -from gpt_engineer.core.default.steps import improve_fn +from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path +from gpt_engineer.core.preprompts_holder import PrepromptsHolder +from gpt_engineer.core.prompt import Prompt + from prompt_toolkit import prompt as cli_input from prompt_toolkit.validation import ValidationError, Validator +import difflib class FeatureValidator(Validator): @@ -64,7 +71,9 @@ def check_for_unstaged_changes( return -def confirm_task_and_context_with_user(feature: Feature, file_selection: FileSelection): +def confirm_feature_context_and_task_with_user( + feature: Feature, file_selection: FileSelection +): file_selection.update_yaml_from_tracked_files() feature_description = feature.get_description() @@ -86,29 +95,80 @@ def confirm_task_and_context_with_user(feature: Feature, file_selection: FileSel # TODO: if no: do you want to edit feature? edit task? complete? or cancel? 
-def run_improve_function(ai: AI, feature: Feature, files: Files): +def compare_files(f1: Files, f2: Files): + def colored_diff(s1, s2): + lines1 = s1.splitlines() + lines2 = s2.splitlines() + + diff = difflib.unified_diff(lines1, lines2, lineterm="") + + RED = "\033[38;5;202m" + GREEN = "\033[92m" + RESET = "\033[0m" + + colored_lines = [] + for line in diff: + if line.startswith("+"): + colored_lines.append(GREEN + line + RESET) + elif line.startswith("-"): + colored_lines.append(RED + line + RESET) + else: + colored_lines.append(line) + + return "\n".join(colored_lines) + + for file in sorted(set(f1) | set(f2)): + diff = colored_diff(f1.get(file, ""), f2.get(file, "")) + if diff: + print(f"Changes to {file}:") + print(diff) + + +def prompt_yesno() -> bool: + TERM_CHOICES = colored("y", "green") + "/" + colored("n", "red") + " " + while True: + response = input(TERM_CHOICES).strip().lower() + if response in ["y", "yes"]: + return True + if response in ["n", "no"]: + break + print("Please respond with 'y' or 'n'") + + +def run_improve_function( + project_path, + feature: Feature, + repository: Repository, + ai: AI, + file_selection: FileSelection, +): + + memory = DiskMemory(memory_path(project_path)) + preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) + + context_string = build_context_string(feature, repository.get_git_context()) prompt = Prompt(feature.get_task(), prefix="Task: ") - # WIP! + files = Files(project_path, file_selection.get_from_yaml()) + improve_lambda = lambda: improve_fn( + ai, prompt, files, memory, preprompts_holder, context_string + ) -# improve_fn( -# self.ai, prompt, files, self.memory, self.preprompts_holder, context_string -# ) + updated_files_dictionary = handle_improve_mode(improve_lambda, memory) + if not updated_files_dictionary or files == updated_files_dictionary: + print( + f"No changes applied. Could you please upload the debug_log_file.txt in {memory.path} folder in a github issue?" + ) -# files_dict_before = FileSelector(project_path).ask_for_files() -# files_dict = handle_improve_mode(prompt, agent, memory, files_dict_before) -# if not files_dict or files_dict_before == files_dict: -# print( -# f"No changes applied. Could you please upload the debug_log_file.txt in {memory.path} folder in a github issue?" -# ) + else: + print("\nChanges to be made:") + compare_files(files, updated_files_dictionary) -# else: -# print("\nChanges to be made:") -# compare(files_dict_before, files_dict) + print() + print(colored("Do you want to apply these changes?", "light_green")) + if not prompt_yesno(): + return -# print() -# print(colored("Do you want to apply these changes?", "light_green")) -# if not prompt_yesno(): -# files_dict = files_dict_before + files.write_to_disk(updated_files_dictionary) diff --git a/gpt_engineer/applications/interactive_cli/files.py b/gpt_engineer/applications/interactive_cli/files.py index 57e3a8a080..b1e7347129 100644 --- a/gpt_engineer/applications/interactive_cli/files.py +++ b/gpt_engineer/applications/interactive_cli/files.py @@ -15,6 +15,9 @@ def __init__(self, project_path: str, selected_files: list): selected_files : list List of file paths relative to the project path. 
""" + + self.project_path = project_path + # Convert the list of selected files and their relative directory into a dictionary of relative file paths content_dict = {} for file_path in selected_files: try: @@ -27,3 +30,11 @@ def __init__(self, project_path: str, selected_files: list): except UnicodeDecodeError: print(f"Warning: File not UTF-8 encoded {file_path}, skipping") super().__init__(content_dict) + + def write_to_disk(self, files: FilesDict): + for name, content in files.items(): + path = Path(self.project_path) / name + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "w") as f: + f.write(content) + return self diff --git a/gpt_engineer/core/default/steps.py b/gpt_engineer/core/default/steps.py index 510c065af3..055398b057 100644 --- a/gpt_engineer/core/default/steps.py +++ b/gpt_engineer/core/default/steps.py @@ -375,13 +375,13 @@ def flush(self): file.flush() -def handle_improve_mode(prompt, agent, memory, files_dict): +def handle_improve_mode(improve_lambda, memory): captured_output = io.StringIO() old_stdout = sys.stdout sys.stdout = Tee(sys.stdout, captured_output) try: - files_dict = agent.improve(files_dict, prompt) + files_dict = improve_lambda() except Exception as e: print( f"Error while improving the project: {e}\nCould you please upload the debug_log_file.txt in {memory.path} folder to github?\nFULL STACK TRACE:\n" From d91b3730f4e8a1c3a8903d1a1178190f6caa9754 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 09:22:51 +0100 Subject: [PATCH 14/36] update sub project --- .../example_project/.feature/description | 1 - .../example_project/.feature/files.yml | 7 ------- .../example_project/.feature/progress.json | 6 ------ .../example_project/.feature/task | 5 ----- .../interactive_cli/example_project/.gitignore | 2 +- .../interactive_cli/example_project/README.md | 17 +++-------------- 6 files changed, 4 insertions(+), 34 deletions(-) delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/.feature/description delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/.feature/task diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/description b/gpt_engineer/applications/interactive_cli/example_project/.feature/description deleted file mode 100644 index 0056c752b3..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/description +++ /dev/null @@ -1 +0,0 @@ -I want to create a feedback form on the website to collect user feedback diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml b/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml deleted file mode 100644 index 1b3fd34e4c..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/files.yml +++ /dev/null @@ -1,7 +0,0 @@ -# Complete list of files shared with the AI -# Please comment out any files not needed as context for this change -# This saves money and avoids overwhelming the AI -- .gitignore -#- 'README.md' -- index.html -- styles.css diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json b/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json deleted file mode 100644 index 6238f820a0..0000000000 --- 
a/gpt_engineer/applications/interactive_cli/example_project/.feature/progress.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "done": [ - "task 1", - "task 2" - ] -} diff --git a/gpt_engineer/applications/interactive_cli/example_project/.feature/task b/gpt_engineer/applications/interactive_cli/example_project/.feature/task deleted file mode 100644 index 3dbc48c050..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/.feature/task +++ /dev/null @@ -1,5 +0,0 @@ -Create a local sql lite database for development. - -Initialize it with a feedback table. - -The table can store a comment and a rating out of 5 diff --git a/gpt_engineer/applications/interactive_cli/example_project/.gitignore b/gpt_engineer/applications/interactive_cli/example_project/.gitignore index 8577ad31af..2ff649ee90 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.gitignore +++ b/gpt_engineer/applications/interactive_cli/example_project/.gitignore @@ -1,4 +1,4 @@ -#.feature - track this folder for testing +# .feature - track this folder for testing .gpteng ignored_test diff --git a/gpt_engineer/applications/interactive_cli/example_project/README.md b/gpt_engineer/applications/interactive_cli/example_project/README.md index 8f6e5c23cd..0fe9092f35 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/README.md +++ b/gpt_engineer/applications/interactive_cli/example_project/README.md @@ -1,19 +1,8 @@ # Local Bakery Website -## Overview -This project is the codebase for a static website for a Local Bakery. It's designed to provide essential information about the bakery, including an about us section, a showcase of products, and contact information. +Try this feature improvement -## Features -- **About Us**: Learn more about the history and mission of the bakery. -- **Products**: Browse the list of baked goods we offer. -- **Contact**: Find contact details and how to reach out to us. +Feature: I want to create a feedback form on the website to collect user feedback -## File Structure -- `index.html`: The main HTML file that contains the structure of the website. -- `styles.css`: CSS file for styling the website. +Task: Create a local sql lite database for development. Initialize it with a feedback table. The table can store a comment and a rating out of 5. -## Getting Started -To get a local copy up and running, follow these simple steps: - -### Prerequisites -- Any modern web browser (e.g., Chrome, Firefox, Safari, or Edge). 
From 16932577db878325ebe0201a663a2bd6dc583dd5 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 09:24:11 +0100 Subject: [PATCH 15/36] ignore feature --- .../applications/interactive_cli/example_project/.gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gpt_engineer/applications/interactive_cli/example_project/.gitignore b/gpt_engineer/applications/interactive_cli/example_project/.gitignore index 2ff649ee90..d3c1171768 100644 --- a/gpt_engineer/applications/interactive_cli/example_project/.gitignore +++ b/gpt_engineer/applications/interactive_cli/example_project/.gitignore @@ -1,4 +1,4 @@ -# .feature - track this folder for testing +.feature .gpteng ignored_test From fe31d62774cd2470b29afc73915fbf9d8ce6fa12 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 21:15:19 +0100 Subject: [PATCH 16/36] push tings --- .../applications/interactive_cli/agent.py | 12 +++- .../interactive_cli/agent_steps.py | 37 +++++++++++-- .../applications/interactive_cli/feature.py | 55 ++++++++++++++++--- 3 files changed, 87 insertions(+), 17 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index 150b58ade3..0807f4002c 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -8,6 +8,7 @@ check_for_unstaged_changes, confirm_feature_context_and_task_with_user, run_improve_function, + adjust_feature_task_or_files, ) from generation_tools import build_context_string @@ -44,9 +45,16 @@ def init(self): def resume(self): - check_for_unstaged_changes(self.repository) + implement = False + + while not implement: + implement = confirm_feature_context_and_task_with_user( + self.feature, self.file_selection + ) - confirm_feature_context_and_task_with_user(self.feature, self.file_selection) + adjust_feature_task_or_files() + + check_for_unstaged_changes(self.repository) run_improve_function( self.project_path, diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index 0d7b65df1a..cb1e01af26 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -5,7 +5,6 @@ from generation_tools import generate_branch_name, build_context_string from termcolor import colored - from gpt_engineer.core.ai import AI from gpt_engineer.core.prompt import Prompt from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode @@ -14,7 +13,6 @@ from gpt_engineer.core.preprompts_holder import PrepromptsHolder from gpt_engineer.core.prompt import Prompt - from prompt_toolkit import prompt as cli_input from prompt_toolkit.validation import ValidationError, Validator import difflib @@ -58,6 +56,16 @@ def update_user_file_selection(file_selection: FileSelection): input("Please edit the YAML file and then press Enter to continue...") +def update_feature_description(feature: Feature): + feature.open_feature_in_editor() + input("Please edit the feature file and then press Enter to continue...") + + +def update_task_description(feature: Feature): + feature.open_feature_in_editor() + input("Please edit the feature file and then press Enter to continue...") + + def check_for_unstaged_changes( repository: Repository, ): @@ -86,14 +94,31 @@ def confirm_feature_context_and_task_with_user( print(f"Task: {task}\n\n") # do you want to attempt this task? - if cli_input("Do you want to attempt this task? 
y/n: ").lower() not in [ + if cli_input("Do you want to implement this task? y/n: ").lower() in [ "y", "yes", ]: - print("Ok, not proceeding. Perhaps you should update the feature and retry") - return - # TODO: if no: do you want to edit feature? edit task? complete? or cancel? + return True + + return False + + +def check_if_task_is_complete(): + + # feature.complete task + # then + + +def adjust_feature_task_or_files(): + # todo : create a function which uses the test4.py example code approach to offer a selection of options to the user + + # c - complete the task and start a new one + + # f - "edit feature" using update_feature_description step + # s - "edit file selection" using update_user_file_selection step + # t - "edit task" using update_task_description step + # def compare_files(f1: Files, f2: Files): def colored_diff(s1, s2): diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index 58fbf9fe4d..bd42e9e86a 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -1,5 +1,7 @@ import json - +import os +import platform +import subprocess from pathlib import Path from typing import Union @@ -16,12 +18,18 @@ class Feature(DiskMemory): """ def __init__(self, project_path: Union[str, Path]): - super().__init__(Path(project_path) / ".feature") + + self.feature_path = Path(project_path) / ".feature" + self.feature_filename = "description.md" + self.progress_filename = "progress.json" + self.task_filename = "task.md" + + super().__init__(self.feature_path) def clear_feature(self) -> None: self.set_description("") self.set_task("") - super().__setitem__("progress.json", json.dumps({"done": []})) + super().__setitem__(self.progress_filename, json.dumps({"done": []})) def get_description(self) -> str: """ @@ -32,7 +40,7 @@ def get_description(self) -> str: str The content of the feature file. """ - return super().__getitem__("description") + return super().__getitem__(self.feature_filename) def set_description(self, feature_description: str): """ @@ -43,7 +51,7 @@ def set_description(self, feature_description: str): feature_description : str The new feature_description to write to the feature file. """ - super().__setitem__("description", feature_description) + super().__setitem__(self.feature_filename, feature_description) def get_progress(self) -> dict: """ @@ -54,7 +62,7 @@ def get_progress(self) -> dict: str The content of the feature file. """ - return json.loads(super().__getitem__("progress.json")) + return json.loads(super().__getitem__(self.progress_filename)) def update_progress(self, task: str): """ @@ -67,7 +75,7 @@ def update_progress(self, task: str): """ progress = self.get_progress() new_progress = progress["done"].append(task) - super().__setitem__("progress.json", json.dumps(new_progress, indent=4)) + super().__setitem__(self.progress_filename, json.dumps(new_progress, indent=4)) def set_task(self, task: str): """ @@ -78,7 +86,7 @@ def set_task(self, task: str): task : str The new task to write to the feature file. """ - super().__setitem__("task", task) + super().__setitem__(self.task_filename, task) def get_task(self) -> str: """ @@ -89,7 +97,7 @@ def get_task(self) -> str: str The content of the feature file. 
""" - return super().__getitem__("task") + return super().__getitem__(self.task_filename) def complete_task(self): """ @@ -100,3 +108,32 @@ def complete_task(self): if task: self.update_progress(task) self.set_task("") + + def _file_path(self, filename): + return self.feature_path / filename + + def _open_file_in_editor(self, path): + """ + Opens the generated YAML file in the default system editor. + If the YAML file is empty or doesn't exist, generate it first. + """ + + # Platform-specific methods to open the file + if platform.system() == "Windows": + os.startfile(path) + elif platform.system() == "Darwin": + subprocess.run(["open", path]) + else: # Linux and other Unix-like systems + subprocess.run(["xdg-open", path]) + + def open_feature_in_editor(self): + """ + Opens the feature file in the default system editor. + """ + self._open_file_in_editor(self._file_path(self.feature_filename)) + + def open_task_in_editor(self): + """ + Opens the task file in the default system editor. + """ + self._open_file_in_editor(self._file_path(self.task_filename)) From 1398726aad1ec6012ebd4fee9613b20b27055ee9 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 21:24:40 +0100 Subject: [PATCH 17/36] fix main cli app --- gpt_engineer/applications/cli/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gpt_engineer/applications/cli/main.py b/gpt_engineer/applications/cli/main.py index a4c031689d..c48d806964 100644 --- a/gpt_engineer/applications/cli/main.py +++ b/gpt_engineer/applications/cli/main.py @@ -455,7 +455,7 @@ def main( if improve_mode: files_dict_before = FileSelector(project_path).ask_for_files() - improve_lambda = lambda: agent.improve(prompt, files_dict_before) + improve_lambda = lambda: agent.improve(files_dict_before, prompt) files_dict = handle_improve_mode(improve_lambda, memory) if not files_dict or files_dict_before == files_dict: From eda60e0a2dd6641313d1c85b2c773647b34fce84 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 21:28:45 +0100 Subject: [PATCH 18/36] basics --- .../applications/interactive_cli/agent_steps.py | 15 +++++++++------ .../applications/interactive_cli/feature.py | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index cb1e01af26..be46c66a91 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -103,22 +103,25 @@ def confirm_feature_context_and_task_with_user( return False -def check_if_task_is_complete(): +def complete_task(): + pass # feature.complete task - # then + # then def adjust_feature_task_or_files(): - # todo : create a function which uses the test4.py example code approach to offer a selection of options to the user + pass + # todo : create a function which uses the test4.py example code approach to offer a selection of options to the user - # c - complete the task and start a new one + # c - complete the task and start a new one - # f - "edit feature" using update_feature_description step + # f - "edit feature" using update_feature_description step # s - "edit file selection" using update_user_file_selection step # t - "edit task" using update_task_description step - # + # + def compare_files(f1: Files, f2: Files): def colored_diff(s1, s2): diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index bd42e9e86a..a22e50a421 
100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -20,7 +20,7 @@ class Feature(DiskMemory): def __init__(self, project_path: Union[str, Path]): self.feature_path = Path(project_path) / ".feature" - self.feature_filename = "description.md" + self.feature_filename = "feature.md" self.progress_filename = "progress.json" self.task_filename = "task.md" From a5aea1aa5b11a10b49a18091a23154d79e2e4223 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 21:40:43 +0100 Subject: [PATCH 19/36] remove prints --- gpt_engineer/applications/interactive_cli/file_selection.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index 6802c41d40..ad3a03b09a 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -177,12 +177,8 @@ def update_yaml_from_tracked_files(self): selected_files, excluded_files = self._get_from_yaml() - print(set(selected_files + excluded_files)) - print(set(tracked_files)) - # If there are no changes, do nothing if set(tracked_files) == set(selected_files + excluded_files): - print("yep") return new_selected_files = list(set(tracked_files) - set(excluded_files)) From 86432144b714145f4c6a506b3bf8efce037c3a24 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 14 May 2024 21:53:11 +0100 Subject: [PATCH 20/36] running to end --- .../applications/interactive_cli/agent.py | 5 +++-- .../interactive_cli/agent_steps.py | 19 +++++++------------ .../applications/interactive_cli/feature.py | 11 +++++++++-- 3 files changed, 19 insertions(+), 16 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index 0807f4002c..219002d858 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -1,7 +1,6 @@ from feature import Feature from file_selection import FileSelection from repository import Repository -from files import Files from agent_steps import ( initialize_new_feature, update_user_file_selection, @@ -9,8 +8,8 @@ confirm_feature_context_and_task_with_user, run_improve_function, adjust_feature_task_or_files, + update_task_description, ) -from generation_tools import build_context_string from gpt_engineer.core.ai import AI from gpt_engineer.core.base_agent import BaseAgent @@ -41,6 +40,8 @@ def init(self): update_user_file_selection(self.file_selection) + update_task_description(self.feature) + self.resume() def resume(self): diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index be46c66a91..36bec2d6ea 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -30,18 +30,11 @@ def validate(self, document): def initialize_new_feature(ai: AI, feature: Feature): feature.clear_feature() - feature_description = cli_input( - "Write feature description: ", - multiline=True, - validator=FeatureValidator(), - bottom_toolbar="Press Ctrl+O to finish", - ) - - feature.set_description(feature_description) + update_feature_description(feature) # print("\n Ticket files created at .ticket \n ") - branch_name = generate_branch_name(ai, feature_description) + branch_name = generate_branch_name(ai, feature.get_description()) 
branch_name = cli_input("\nConfirm branch name: ", default=branch_name) @@ -53,7 +46,9 @@ def initialize_new_feature(ai: AI, feature: Feature): def update_user_file_selection(file_selection: FileSelection): file_selection.update_yaml_from_tracked_files() file_selection.open_yaml_in_editor() - input("Please edit the YAML file and then press Enter to continue...") + input( + "Please edit the file selection for this feature and then press Enter to continue..." + ) def update_feature_description(feature: Feature): @@ -62,8 +57,8 @@ def update_feature_description(feature: Feature): def update_task_description(feature: Feature): - feature.open_feature_in_editor() - input("Please edit the feature file and then press Enter to continue...") + feature.open_task_in_editor() + input("Please edit the task file and then press Enter to continue...") def check_for_unstaged_changes( diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index a22e50a421..1c0344f1e9 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -27,8 +27,15 @@ def __init__(self, project_path: Union[str, Path]): super().__init__(self.feature_path) def clear_feature(self) -> None: - self.set_description("") - self.set_task("") + self.set_description( + """Please replace with your own feature description. Markdown is supported. + +Hint: +Improve your prompts by including technical references to any APIs, libraries, components etc that the pre trained model may not know about in detail already.""" + ) + self.set_task( + "Please replace with a task description - directing the AI on the first task to implement on this feature" + ) super().__setitem__(self.progress_filename, json.dumps({"done": []})) def get_description(self) -> str: From 8977e8e2b2b0e20ed8d38834e9cdd71cb71e9622 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Wed, 15 May 2024 09:12:11 +0100 Subject: [PATCH 21/36] running in little loop --- .../applications/interactive_cli/agent.py | 2 +- .../interactive_cli/agent_steps.py | 111 +++++++++--------- .../interactive_cli/repository.py | 23 ++++ 3 files changed, 77 insertions(+), 59 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index 219002d858..11bb4634f0 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -36,7 +36,7 @@ def __init__( def init(self): - initialize_new_feature(self.ai, self.feature) + initialize_new_feature(self.ai, self.feature, self.repository) update_user_file_selection(self.file_selection) diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index 36bec2d6ea..cf8b85bd47 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -15,7 +15,8 @@ from prompt_toolkit import prompt as cli_input from prompt_toolkit.validation import ValidationError, Validator -import difflib +from prompt_toolkit import PromptSession as InputSession +from prompt_toolkit.completion import WordCompleter class FeatureValidator(Validator): @@ -27,18 +28,16 @@ def validate(self, document): ) -def initialize_new_feature(ai: AI, feature: Feature): +def initialize_new_feature(ai: AI, feature: Feature, repository: Repository): feature.clear_feature() update_feature_description(feature) - # print("\n 
Ticket files created at .ticket \n ") - branch_name = generate_branch_name(ai, feature.get_description()) branch_name = cli_input("\nConfirm branch name: ", default=branch_name) - # todo: use gitpython to create new branch. + repository.create_branch(branch_name) print("\nFeature branch created.\n") @@ -118,46 +117,6 @@ def adjust_feature_task_or_files(): # -def compare_files(f1: Files, f2: Files): - def colored_diff(s1, s2): - lines1 = s1.splitlines() - lines2 = s2.splitlines() - - diff = difflib.unified_diff(lines1, lines2, lineterm="") - - RED = "\033[38;5;202m" - GREEN = "\033[92m" - RESET = "\033[0m" - - colored_lines = [] - for line in diff: - if line.startswith("+"): - colored_lines.append(GREEN + line + RESET) - elif line.startswith("-"): - colored_lines.append(RED + line + RESET) - else: - colored_lines.append(line) - - return "\n".join(colored_lines) - - for file in sorted(set(f1) | set(f2)): - diff = colored_diff(f1.get(file, ""), f2.get(file, "")) - if diff: - print(f"Changes to {file}:") - print(diff) - - -def prompt_yesno() -> bool: - TERM_CHOICES = colored("y", "green") + "/" + colored("n", "red") + " " - while True: - response = input(TERM_CHOICES).strip().lower() - if response in ["y", "yes"]: - return True - if response in ["n", "no"]: - break - print("Please respond with 'y' or 'n'") - - def run_improve_function( project_path, feature: Feature, @@ -179,19 +138,55 @@ def run_improve_function( ai, prompt, files, memory, preprompts_holder, context_string ) + print("\n---- begining code generation ----\n") updated_files_dictionary = handle_improve_mode(improve_lambda, memory) - if not updated_files_dictionary or files == updated_files_dictionary: - print( - f"No changes applied. Could you please upload the debug_log_file.txt in {memory.path} folder in a github issue?" - ) - - else: - print("\nChanges to be made:") - compare_files(files, updated_files_dictionary) - - print() - print(colored("Do you want to apply these changes?", "light_green")) - if not prompt_yesno(): - return + print("\n---- ending code generation ----\n") files.write_to_disk(updated_files_dictionary) + + review_changes(project_path, feature, repository, ai, file_selection) + + +def review_changes( + project_path, + feature: Feature, + repository: Repository, + ai: AI, + file_selection: FileSelection, +): + + completer = WordCompleter(["r", "c", "u"], ignore_case=True) + session = InputSession() + + # Using prompt to get user input + result = session.prompt( + """Please review the unstaged changes generated by GPT Engineer.. + +r: Delete and retry the generation (incorporating changes to prompt files) +c: Complete task and stage changes +u: Undo changes and exit +""", + completer=completer, + ).lower() + + if result == "r": + print("Deleting changes and rerunning generation...") + repository.undo_unstaged_changes() + run_improve_function(project_path, feature, repository, ai, file_selection) + + if result == "c": + print("You have chosen to retry the generation.") + repository.stage_all_changes() + feature.complete_task() + if cli_input("Do you want to start a new task? 
y/n: ").lower() in [ + "y", + "yes", + ]: + update_task_description(feature) + run_improve_function(project_path, feature, repository, ai, file_selection) + return + + if result == "u": + print("Undo the last operation.") + repository.undo_unstaged_changes() + return diff --git a/gpt_engineer/applications/interactive_cli/repository.py b/gpt_engineer/applications/interactive_cli/repository.py index fe7dd852df..18b681c35f 100644 --- a/gpt_engineer/applications/interactive_cli/repository.py +++ b/gpt_engineer/applications/interactive_cli/repository.py @@ -115,3 +115,26 @@ def get_git_context(self): unstaged_changes, tracked_files, ) + + def create_branch(self, branch_name): + """ + Create a new branch in the repository. + + Parameters + ---------- + branch_name : str + The name of the new branch. + """ + self.repo.git.checkout("-b", branch_name) + + def stage_all_changes(self): + """ + Stage all changes in the repository. + """ + self.repo.git.add("--all") + + def undo_unstaged_changes(self): + """ + Undo all unstaged changes in the repository. + """ + self.repo.git.checkout("--", ".") From dd6daf69af3b0664526bba0dc8ce5a4ca7624ea5 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 20 May 2024 09:14:35 +0100 Subject: [PATCH 22/36] wip --- .gitignore | 2 + .../applications/interactive_cli/agent.py | 15 +- .../interactive_cli/agent_steps.py | 23 +-- .../interactive_cli/file_selection.py | 168 +++++++++--------- .../interactive_cli/generation_tools.py | 64 +++++++ .../applications/interactive_cli/main.py | 13 +- .../applications/interactive_cli/settings.py | 3 + quicktest.py | 66 +++++++ .../applications/interactive_cli/__init__.py | 0 .../interactive_cli/test_file_selection.py | 118 ++++++++++++ 10 files changed, 370 insertions(+), 102 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli/settings.py create mode 100644 quicktest.py create mode 100644 tests/applications/interactive_cli/__init__.py create mode 100644 tests/applications/interactive_cli/test_file_selection.py diff --git a/.gitignore b/.gitignore index 5cbb23225b..e290f81e33 100644 --- a/.gitignore +++ b/.gitignore @@ -94,3 +94,5 @@ gpt_engineer/benchmark/benchmarks/apps/dataset gpt_engineer/benchmark/benchmarks/mbpp/dataset prompt + +.feature \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index 11bb4634f0..33ae178988 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -1,12 +1,13 @@ from feature import Feature from file_selection import FileSelection from repository import Repository +from settings import Settings from agent_steps import ( initialize_new_feature, update_user_file_selection, check_for_unstaged_changes, confirm_feature_context_and_task_with_user, - run_improve_function, + run_task_loop, adjust_feature_task_or_files, update_task_description, ) @@ -34,17 +35,19 @@ def __init__( self.file_selection = FileSelection(project_path, repository) - def init(self): + def init(self, settings: Settings): - initialize_new_feature(self.ai, self.feature, self.repository) + initialize_new_feature( + self.ai, self.feature, self.repository, settings.no_branch + ) update_user_file_selection(self.file_selection) update_task_description(self.feature) - self.resume() + self.resume(settings) - def resume(self): + def resume(self, settings: Settings): implement = False @@ -57,7 +60,7 @@ def resume(self): check_for_unstaged_changes(self.repository) - 
run_improve_function( + run_task_loop( self.project_path, self.feature, self.repository, diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index cf8b85bd47..0d619cb6cf 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -28,7 +28,9 @@ def validate(self, document): ) -def initialize_new_feature(ai: AI, feature: Feature, repository: Repository): +def initialize_new_feature( + ai: AI, feature: Feature, repository: Repository, no_branch: bool +): feature.clear_feature() update_feature_description(feature) @@ -37,9 +39,9 @@ def initialize_new_feature(ai: AI, feature: Feature, repository: Repository): branch_name = cli_input("\nConfirm branch name: ", default=branch_name) - repository.create_branch(branch_name) - - print("\nFeature branch created.\n") + if not no_branch: + repository.create_branch(branch_name) + print("\nFeature branch created.\n") def update_user_file_selection(file_selection: FileSelection): @@ -77,14 +79,14 @@ def confirm_feature_context_and_task_with_user( feature: Feature, file_selection: FileSelection ): file_selection.update_yaml_from_tracked_files() + file_string = file_selection.get_pretty_from_yaml() feature_description = feature.get_description() - file_string = file_selection.get_pretty_from_yaml() task = feature.get_task() # list feature, files and task print(f"Feature: {feature_description}\n\n") - print(f"Files: {file_string}\n\n") + print(f"Files: \n\nrepo\n{file_string}\n\n") print(f"Task: {task}\n\n") # do you want to attempt this task? @@ -117,7 +119,7 @@ def adjust_feature_task_or_files(): # -def run_improve_function( +def run_task_loop( project_path, feature: Feature, repository: Repository, @@ -172,18 +174,19 @@ def review_changes( if result == "r": print("Deleting changes and rerunning generation...") repository.undo_unstaged_changes() - run_improve_function(project_path, feature, repository, ai, file_selection) + run_task_loop(project_path, feature, repository, ai, file_selection) if result == "c": - print("You have chosen to retry the generation.") + print("Completing task... ") repository.stage_all_changes() feature.complete_task() + file_selection.update_yaml_from_tracked_files() if cli_input("Do you want to start a new task? 
y/n: ").lower() in [ "y", "yes", ]: update_task_description(feature) - run_improve_function(project_path, feature, repository, ai, file_selection) + run_task_loop(project_path, feature, repository, ai, file_selection) return if result == "u": diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index ad3a03b09a..304877fe83 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -3,6 +3,7 @@ import subprocess from typing import List, Tuple +from collections import defaultdict import yaml @@ -17,40 +18,61 @@ def __init__(self, project_path: str, repository): self.yaml_path = os.path.join(project_path, ".feature", "files.yml") self._initialize() - def _create_nested_structure_from_file_paths(self, files_paths): - files_paths.sort() - file_structure = [] - for filepath in files_paths: - parts = filepath.split("/") - # Filter out the '.ticket' directory from paths - if ".ticket" in parts or ".feature" in parts: - continue - node = file_structure - for i, part in enumerate(parts[:-1]): - # Append '/' to part if it's a directory - directory = part if part.endswith("/") else part + "/" - found = False - for item in node: - if isinstance(item, dict) and directory in item: - node = item[directory] - found = True - break - if not found: - new_node = [] - # Insert directory at the correct position (before any file) - index = next( - (idx for idx, item in enumerate(node) if isinstance(item, str)), - len(node), - ) - node.insert(index, {directory: new_node}) - node = new_node - # Add the file to the last node, ensuring directories are listed first - if not parts[-1].endswith("/"): - node.append(parts[-1]) - - return file_structure - - def _write_yaml_with_comments(self, yaml_content): + def _paths_to_tree(self, paths): + def nested_dict(): + return defaultdict(nested_dict) + + tree = nested_dict() + + files_marker = "(./)" + + for path in paths: + parts = path.split(os.sep) + file = parts.pop() + d = tree + for part in parts: + d = d[part] + if files_marker not in d: + d[files_marker] = [] + d[files_marker].append(file) + + def default_to_regular(d): + if isinstance(d, defaultdict): + d = {k: default_to_regular(v) for k, v in d.items()} + return d + + def ordered_dict(data): + if isinstance(data, dict): + keys = sorted(data.keys(), key=lambda x: (x == files_marker, x)) + return {k: ordered_dict(data[k]) for k in keys} + return data + + ordered_tree = ordered_dict(default_to_regular(tree)) + + return ordered_tree + # return yaml.dump(tree, sort_keys=False) + + def _tree_to_paths(self, tree): + + files_marker = "(./)" + + def traverse_tree(tree, base_path=""): + paths = [] + if tree: + for key, value in tree.items(): + if key == files_marker: + if value: + for file in value: + paths.append(os.path.join(base_path, file)) + else: + subfolder_path = os.path.join(base_path, key) + paths.extend(traverse_tree(value, subfolder_path)) + return paths + + # tree = yaml.safe_load(yaml_content) + return traverse_tree(tree) + + def _write_yaml_with_header(self, yaml_content): with open(self.yaml_path, "w") as file: file.write( f"""# Complete list of files shared with the AI @@ -69,69 +91,44 @@ def _initialize(self): print("YAML file is missing or empty, generating YAML...") - file_structure = self._create_nested_structure_from_file_paths( - self.repository.get_tracked_files() - ) + tree = self._paths_to_tree(self.repository.get_tracked_files()) - 
self._write_yaml_with_comments( - yaml.safe_dump( - file_structure, default_flow_style=False, sort_keys=False, indent=2 - ) - ) + self._write_yaml_with_header(yaml.dump(tree, sort_keys=False)) def _get_from_yaml(self) -> Tuple[List[str], List[str]]: with open(self.yaml_path, "r") as file: - original_content = file.readlines()[3:] # Skip the 3 instruction lines + original_content_lines = file.readlines()[ + 3: + ] # Skip the 3 instruction lines # Create a version of the content with all lines uncommented - uncommented_content = "".join(line.lstrip("# ") for line in original_content) - - # Load the original and uncommented content as YAML - original_structure = yaml.safe_load("".join(original_content)) - uncommented_structure = yaml.safe_load(uncommented_content) + commented_content = "".join(original_content_lines) + uncommented_content = "".join( + line.replace("# ", "").replace("#", "") for line in original_content_lines + ) - def recurse_items(items, path=""): - paths = [] - if isinstance(items, dict): - for key, value in items.items(): - new_path = os.path.join(path, key) - paths.extend(recurse_items(value, new_path)) - elif isinstance(items, list): - for item in items: - if isinstance(item, dict): - paths.extend(recurse_items(item, path)) - else: - paths.append(os.path.join(path, item)) - else: - paths.append(path) - return paths + print(uncommented_content) - original_paths = recurse_items(original_structure) - uncommented_paths = recurse_items(uncommented_structure) + included_files = self._tree_to_paths(yaml.safe_load(commented_content)) + all_files = self._tree_to_paths(yaml.safe_load(uncommented_content)) # Determine excluded files by finding the difference - excluded_files = list(set(uncommented_paths) - set(original_paths)) + excluded_files = list(set(all_files) - set(included_files)) - return (original_paths, excluded_files) + return (included_files, excluded_files) def _set_to_yaml(self, selected_files, excluded_files): # Dont worry about commenting lines if they are no excluded files if not excluded_files: - file_structure = self._create_nested_structure_from_file_paths( - selected_files - ) + tree = self._paths_to_tree(selected_files) - self._write_yaml_with_comments( - yaml.safe_dump( - file_structure, default_flow_style=False, sort_keys=False, indent=2 - ) - ) + self._write_yaml_with_header(yaml.dump(tree, sort_keys=False)) return all_files = list(selected_files) + list(excluded_files) - current_structure = self._create_nested_structure_from_file_paths(all_files) + current_tree = self._paths_to_tree(all_files) # Add a # in front of files which are excluded. This is a marker for us to go back and properly comment them out def mark_excluded_files(structure, prefix=""): @@ -144,7 +141,7 @@ def mark_excluded_files(structure, prefix=""): if full_path in excluded_files: structure[i] = f"#{item}" - mark_excluded_files(current_structure) + mark_excluded_files(current_tree) # Find all files marked for commenting - add comment and remove the mark. 
def comment_marked_files(yaml_content): @@ -153,18 +150,16 @@ def comment_marked_files(yaml_content): updated_lines = [] for line in lines: if "#" in line: - line = "#" + line.replace("#", "").strip() + line = "#" + line.replace("#", "") updated_lines.append(line) return "\n".join(updated_lines) - content = yaml.safe_dump( - current_structure, default_flow_style=False, sort_keys=False, indent=2 - ) + content = yaml.dump(tree, sort_keys=False) updated_content = comment_marked_files(content) - self._write_yaml_with_comments(updated_content) + self._write_yaml_with_header(updated_content) return @@ -177,6 +172,8 @@ def update_yaml_from_tracked_files(self): selected_files, excluded_files = self._get_from_yaml() + print(excluded_files) + # If there are no changes, do nothing if set(tracked_files) == set(selected_files + excluded_files): return @@ -219,8 +216,11 @@ def insert_path(tree, path_parts): # Helper function to format the tree into a string with ASCII graphics def format_tree(tree, prefix=""): lines = [] - # Sorted to keep alphabetical order - items = sorted(tree.items()) + # Separate directories and files + directories = {k: v for k, v in tree.items() if v} + files = {k: v for k, v in tree.items() if not v} + # Sort items to keep alphabetical order, directories first + items = sorted(directories.items()) + sorted(files.items()) for i, (key, sub_tree) in enumerate(items): if i == len(items) - 1: # Last item uses └── lines.append(prefix + "└── " + key) diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index ad3487840b..29c43117c8 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -128,3 +128,67 @@ def generate_suggested_tasks( xml = messages[-1].content.strip() return parse_task_xml_to_class(xml).tasks + + +def fuzzy_parse_yaml_files(ai: AI, yaml_string: str) -> str: + system_prompt = """ +You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. + +A user has been given a yaml representation of a file structure, represented in block collections like so: + +- folder1/: + - folder2/: + - file3 + - file4 + - file1 + - file2 +- file5 +- file6 + +They have been asked to comment out any files that they wish to be excluded. + +An example of the yaml file after commenting might be something like this: + +- folder1/: + - folder2/: + # - file1 + # - file2 + - folder3/: + - file3 + # - file4 + - file5 +# - file6 +- file7 + +Although this isnt strictly correct yaml, their intentions are clear. + +Your job is to return the list of included files, and the list of excluded files as json. + +The json you should return will be like this: + +{ + "included_files": [ + "folder1/file5", + "folder1/folder3/file3", + "file7" + ], + "excluded_files": [ + "folder1/folder2/file1", + "folder1/folder2/file2", + "folder1/folder3/file4", + "folder1/file5", + ] +} + +Files can only be included or excluded, not both. If you are confused about the state of a file make your best guess - and if you really arent sure then mark it as included. 
+""" + + # ai.llm.callbacks.clear() # silent + + messages = ai.start(system_prompt, yaml_string, step_name="fuzzy-parse-yaml") + + # ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + + xml = messages[-1].content.strip() + + return parse_task_xml_to_class(xml).tasks diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 073413311c..b6835b7c16 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -4,6 +4,7 @@ from dotenv import load_dotenv from feature import Feature from repository import Repository +from settings import Settings from gpt_engineer.core.ai import AI @@ -15,6 +16,12 @@ def main( project_path: str = typer.Argument(".", help="path"), model: str = typer.Argument("gpt-4-turbo", help="model id string"), new: bool = typer.Option(False, "--new", "-n", help="Initialize new feature."), + no_branch: bool = typer.Option( + False, + "--no-branch", + "-nb", + help="Do not create a new feature branch for this work.", + ), temperature: float = typer.Option( 0.1, "--temperature", @@ -56,10 +63,12 @@ def main( agent = FeatureAgent(project_path, feature, repository, ai) + settings = Settings(no_branch) + if new: - agent.init() + agent.init(settings) else: - agent.resume() + agent.resume(settings) if __name__ == "__main__": diff --git a/gpt_engineer/applications/interactive_cli/settings.py b/gpt_engineer/applications/interactive_cli/settings.py new file mode 100644 index 0000000000..fa12988c65 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/settings.py @@ -0,0 +1,3 @@ +class Settings: + def __init__(self, no_branch: bool = False): + self.no_branch = no_branch diff --git a/quicktest.py b/quicktest.py new file mode 100644 index 0000000000..0f2a445d4e --- /dev/null +++ b/quicktest.py @@ -0,0 +1,66 @@ +import os +import yaml +from collections import defaultdict + + +def paths_to_yaml(paths): + def nested_dict(): + return defaultdict(nested_dict) + + tree = nested_dict() + + for path in paths: + parts = path.split(os.sep) + file = parts.pop() + d = tree + for part in parts: + d = d[part] + if "/" not in d: + d["/"] = [] + d["/"].append(file) + + def default_to_regular(d): + if isinstance(d, defaultdict): + d = {k: default_to_regular(v) for k, v in d.items()} + return d + + tree = default_to_regular(tree) + + return yaml.dump(tree, sort_keys=False) + + +def yaml_to_paths(yaml_content): + def traverse_tree(tree, base_path=""): + paths = [] + for key, value in tree.items(): + if key == "./": + for file in value: + paths.append(os.path.join(base_path, file)) + else: + subfolder_path = os.path.join(base_path, key) + paths.extend(traverse_tree(value, subfolder_path)) + return paths + + tree = yaml.safe_load(yaml_content) + return traverse_tree(tree) + + +# Example usage +yaml_content = """ +folder: + ./: + # - file1.txt + - file2.txt + subfolder: + ./: + - file3.txt +""" + +paths = yaml_to_paths(yaml_content) +print(paths) + + +# paths = ["folder/file1.txt", "folder/file2.txt", "folder/subfolder/file3.txt"] + +# yaml_output = paths_to_yaml(paths) +# print(yaml_output) diff --git a/tests/applications/interactive_cli/__init__.py b/tests/applications/interactive_cli/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/applications/interactive_cli/test_file_selection.py b/tests/applications/interactive_cli/test_file_selection.py new file mode 100644 index 0000000000..6a069e58cf --- /dev/null +++ 
b/tests/applications/interactive_cli/test_file_selection.py @@ -0,0 +1,118 @@ +import os +import shutil +import tempfile +import unittest + +from gpt_engineer.applications.interactive_cli.file_selection import FileSelection + + +class MockRepository: + def __init__(self, files): + self.files = files + + def get_tracked_files(self): + return self.files + + +class TestFileSelection(unittest.TestCase): + + def setUp(self): + # Create a temporary directory for the test + self.test_dir = tempfile.mkdtemp() + self.project_path = self.test_dir + os.makedirs(os.path.join(self.project_path, ".feature"), exist_ok=True) + + # Initial file structure for the mock repository + self.initial_files = [ + "folder1/file1", + "folder1/file2", + "folder1/folder2/file3", + "folder1/folder2/file4", + "file5", + "file6", + ] + self.repository = MockRepository(self.initial_files) + + # Initialize the FileSelection object + self.file_selection = FileSelection(self.project_path, self.repository) + + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + + def test_lifecycle(self): + # Step 1: Create YAML file from the mock repository + self.file_selection._initialize() + expected_yaml_initial = """# Complete list of files shared with the AI +# Please comment out any files not needed as context for this change +# This saves money and avoids overwhelming the AI +- folder1/: + - folder2/: + - file3 + - file4 + - file1 + - file2 +- file5 +- file6 +""" + with open(self.file_selection.yaml_path, "r") as file: + initial_yaml_content = file.read() + + self.assertEqual(initial_yaml_content, expected_yaml_initial) + + # Step 2: Update the YAML file directly (simulating user comments) + edited_yaml_content = """# Complete list of files shared with the AI +# Please comment out any files not needed as context for this change +# This saves money and avoids overwhelming the AI +- folder1/: + - folder2/: + # - file3 + # - file4 + # - file1 + - file2 +# - file5 +- file6 +""" + with open(self.file_selection.yaml_path, "w") as file: + file.write(edited_yaml_content) + + # Step 3: Update tracked files in the repository and update the YAML file + new_files = [ + "folder1/file1", + "folder1/file2", + "folder1/folder2/file3", + "folder1/folder2/file4", + "file5", + "file6", + "newfile7", + ] + self.repository.files = new_files + self.file_selection.update_yaml_from_tracked_files() + + expected_yaml_updated = """# Complete list of files shared with the AI +# Please comment out any files not needed as context for this change +# This saves money and avoids overwhelming the AI +- folder1/: + - folder2/: + # - file3 + # - file4 + # - file1 + - file2 +# - file5 +- file6 +- newfile7 +""" + with open(self.file_selection.yaml_path, "r") as file: + updated_yaml_content = file.read() + + self.assertEqual(updated_yaml_content, expected_yaml_updated) + + # Step 4: Get files from YAML and verify + selected_files = self.file_selection.get_from_yaml() + expected_selected_files = ["folder1/file2", "file6", "newfile7"] + + self.assertEqual(selected_files, expected_selected_files) + + +if __name__ == "__main__": + unittest.main() From d20c78626c39e4f325c11be6530086cdcc29d369 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 20 May 2024 21:59:00 +0100 Subject: [PATCH 23/36] wip -bad fuzzy matching --- .../applications/interactive_cli/__init__.py | 0 .../applications/interactive_cli/agent.py | 18 +- .../interactive_cli/agent_steps.py | 40 +- .../applications/interactive_cli/domain.py | 13 + 
.../interactive_cli/file_selection.py | 318 +++++++++------- .../interactive_cli/generation_tools.py | 359 ++++++++++++++++-- .../applications/interactive_cli/main.py | 10 +- .../applications/interactive_cli/settings.py | 3 - .../interactive_cli/test_file_selection.py | 319 ++++++++++------ 9 files changed, 759 insertions(+), 321 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli/__init__.py create mode 100644 gpt_engineer/applications/interactive_cli/domain.py delete mode 100644 gpt_engineer/applications/interactive_cli/settings.py diff --git a/gpt_engineer/applications/interactive_cli/__init__.py b/gpt_engineer/applications/interactive_cli/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index 33ae178988..e20667ad83 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -1,8 +1,8 @@ -from feature import Feature -from file_selection import FileSelection -from repository import Repository -from settings import Settings -from agent_steps import ( +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.domain import Settings +from gpt_engineer.applications.interactive_cli.agent_steps import ( initialize_new_feature, update_user_file_selection, check_for_unstaged_changes, @@ -33,7 +33,7 @@ def __init__( self.repository = repository self.ai = ai or AI() - self.file_selection = FileSelection(project_path, repository) + self.file_selector = FileSelector(project_path, repository) def init(self, settings: Settings): @@ -41,7 +41,7 @@ def init(self, settings: Settings): self.ai, self.feature, self.repository, settings.no_branch ) - update_user_file_selection(self.file_selection) + update_user_file_selection(self.file_selector) update_task_description(self.feature) @@ -53,7 +53,7 @@ def resume(self, settings: Settings): while not implement: implement = confirm_feature_context_and_task_with_user( - self.feature, self.file_selection + self.feature, self.file_selector ) adjust_feature_task_or_files() @@ -65,7 +65,7 @@ def resume(self, settings: Settings): self.feature, self.repository, self.ai, - self.file_selection, + self.file_selector, ) def improve(self): diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index 0d619cb6cf..34e5350a97 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -1,9 +1,11 @@ -from feature import Feature -from file_selection import FileSelection -from repository import Repository -from files import Files -from generation_tools import generate_branch_name, build_context_string -from termcolor import colored +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.files import Files +from gpt_engineer.applications.interactive_cli.generation_tools import ( + generate_branch_name, + build_context_string, +) from gpt_engineer.core.ai import AI from 
gpt_engineer.core.prompt import Prompt @@ -44,9 +46,9 @@ def initialize_new_feature( print("\nFeature branch created.\n") -def update_user_file_selection(file_selection: FileSelection): - file_selection.update_yaml_from_tracked_files() - file_selection.open_yaml_in_editor() +def update_user_file_selection(file_selector: FileSelector): + file_selector.update_yaml_from_tracked_files() + file_selector.open_yaml_in_editor() input( "Please edit the file selection for this feature and then press Enter to continue..." ) @@ -76,10 +78,10 @@ def check_for_unstaged_changes( def confirm_feature_context_and_task_with_user( - feature: Feature, file_selection: FileSelection + feature: Feature, file_selector: FileSelector ): - file_selection.update_yaml_from_tracked_files() - file_string = file_selection.get_pretty_from_yaml() + file_selector.update_yaml_from_tracked_files() + file_string = file_selector.get_pretty_selected_from_yaml() feature_description = feature.get_description() task = feature.get_task() @@ -124,7 +126,7 @@ def run_task_loop( feature: Feature, repository: Repository, ai: AI, - file_selection: FileSelection, + file_selector: FileSelector, ): memory = DiskMemory(memory_path(project_path)) @@ -134,7 +136,7 @@ def run_task_loop( prompt = Prompt(feature.get_task(), prefix="Task: ") - files = Files(project_path, file_selection.get_from_yaml()) + files = Files(project_path, file_selector.get_from_yaml()) improve_lambda = lambda: improve_fn( ai, prompt, files, memory, preprompts_holder, context_string @@ -146,7 +148,7 @@ def run_task_loop( files.write_to_disk(updated_files_dictionary) - review_changes(project_path, feature, repository, ai, file_selection) + review_changes(project_path, feature, repository, ai, file_selector) def review_changes( @@ -154,7 +156,7 @@ def review_changes( feature: Feature, repository: Repository, ai: AI, - file_selection: FileSelection, + file_selector: FileSelector, ): completer = WordCompleter(["r", "c", "u"], ignore_case=True) @@ -174,19 +176,19 @@ def review_changes( if result == "r": print("Deleting changes and rerunning generation...") repository.undo_unstaged_changes() - run_task_loop(project_path, feature, repository, ai, file_selection) + run_task_loop(project_path, feature, repository, ai, file_selector) if result == "c": print("Completing task... ") repository.stage_all_changes() feature.complete_task() - file_selection.update_yaml_from_tracked_files() + file_selector.update_yaml_from_tracked_files() if cli_input("Do you want to start a new task? 
y/n: ").lower() in [ "y", "yes", ]: update_task_description(feature) - run_task_loop(project_path, feature, repository, ai, file_selection) + run_task_loop(project_path, feature, repository, ai, file_selector) return if result == "u": diff --git a/gpt_engineer/applications/interactive_cli/domain.py b/gpt_engineer/applications/interactive_cli/domain.py new file mode 100644 index 0000000000..2493c40772 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/domain.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass +from typing import List + + +@dataclass +class FileSelection: + included_files: List[str] + excluded_files: List[str] + + +class Settings: + def __init__(self, no_branch: bool = False): + self.no_branch = no_branch diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index 304877fe83..1b7e18863b 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -1,86 +1,192 @@ import os import platform import subprocess +import yaml -from typing import List, Tuple -from collections import defaultdict +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.core.ai import AI +from gpt_engineer.applications.interactive_cli.generation_tools import ( + fuzzy_parse_file_selection, +) +from gpt_engineer.applications.interactive_cli.domain import FileSelection + + +def paths_to_tree(paths): + tree = {} + files_marker = "(./)" + + for path in paths: + parts = path.split("/") + current_level = tree + + for part in parts[:-1]: + if part not in current_level: + current_level[part] = {} + current_level = current_level[part] + + if isinstance(current_level, dict): + if files_marker not in current_level: + current_level[files_marker] = [] + current_level[files_marker].append(parts[-1]) + + # Clean and sort the tree to match the required format + def clean_tree(node): + if not isinstance(node, dict): + return node + sorted_keys = sorted(node.keys(), key=lambda x: (x == files_marker, x)) + cleaned_node = {key: clean_tree(node[key]) for key in sorted_keys} + if sorted_keys == [files_marker]: + return cleaned_node[files_marker] + return cleaned_node + + cleaned_tree = clean_tree(tree) + return cleaned_tree + + +def tree_to_paths(tree): + + files_marker = "(./)" + + def traverse_tree(tree, base_path=""): + paths = [] + if tree: + for key, value in tree.items(): + if key == files_marker: + if value: + for file in value: + paths.append(os.path.join(base_path, file)) + elif isinstance(value, list): + for file in value: + paths.append(os.path.join(base_path, key, file)) + else: + subfolder_path = os.path.join(base_path, key) + paths.extend(traverse_tree(value, subfolder_path)) + return paths + + return traverse_tree(tree) + + +def commented_yaml_to_file_selection(commented_content) -> FileSelection: + commented_content_lines = commented_content.split("\n") + uncommented_content_1 = "\n".join( + line.replace("# ", "").replace("#", "") for line in commented_content_lines + ) + uncommented_content_2 = "\n".join( + line.replace("#", "") for line in commented_content_lines + ) + + included_files = tree_to_paths(yaml.safe_load(commented_content)) + try: + all_files = tree_to_paths(yaml.safe_load(uncommented_content_1)) + except: + try: + all_files = tree_to_paths(yaml.safe_load(uncommented_content_2)) + except: + raise ValueError( + "Could not convert the commented yaml to a file selection. 
Please check the format." + ) -import yaml + included_files_not_in_all_files = set(included_files) - set(all_files) + if len(included_files_not_in_all_files) > 0: + raise ValueError("Yaml file selection has not been read correctly.") -class FileSelection: - """ - Manages the active files in a project directory and creates a YAML file listing them. - """ + excluded_files = list(set(all_files) - set(included_files)) + return FileSelection(included_files, excluded_files) - def __init__(self, project_path: str, repository): - self.repository = repository - self.yaml_path = os.path.join(project_path, ".feature", "files.yml") - self._initialize() - def _paths_to_tree(self, paths): - def nested_dict(): - return defaultdict(nested_dict) +def file_selection_to_commented_yaml(selection: FileSelection) -> str: + # Dont worry about commenting lines if they are no excluded files + if not selection.excluded_files: + tree = paths_to_tree(selection.included_files) + + return yaml.dump(tree, sort_keys=False) - tree = nested_dict() + all_files = list(selection.included_files) + list(selection.excluded_files) - files_marker = "(./)" + current_tree = paths_to_tree(all_files) + + # Add a # in front of files which are excluded. This is a marker for us to go back and properly comment them out + def mark_excluded_files(structure, prefix=""): + if isinstance(structure, dict): + for key, value in structure.items(): + if key == "(./)": + structure[key] = mark_excluded_files(value, prefix) + else: + new_prefix = os.path.join(prefix, key) + structure[key] = mark_excluded_files(value, new_prefix) + elif isinstance(structure, list): + for i, item in enumerate(structure): + full_path = os.path.join(prefix, item) - for path in paths: - parts = path.split(os.sep) - file = parts.pop() - d = tree - for part in parts: - d = d[part] - if files_marker not in d: - d[files_marker] = [] - d[files_marker].append(file) + if full_path in selection.excluded_files: + structure[i] = f"#{item}" - def default_to_regular(d): - if isinstance(d, defaultdict): - d = {k: default_to_regular(v) for k, v in d.items()} - return d + return structure - def ordered_dict(data): - if isinstance(data, dict): - keys = sorted(data.keys(), key=lambda x: (x == files_marker, x)) - return {k: ordered_dict(data[k]) for k in keys} - return data + mark_excluded_files(current_tree) - ordered_tree = ordered_dict(default_to_regular(tree)) + content = yaml.dump(current_tree, sort_keys=False) - return ordered_tree - # return yaml.dump(tree, sort_keys=False) + # Find all files marked for commenting - add comment and remove the mark. + def comment_marked_files(yaml_content): + lines = yaml_content.split("\n") - def _tree_to_paths(self, tree): + updated_lines = [] + for line in lines: + if "#" in line: + line = "#" + line.replace("#", "").replace("'", "") + updated_lines.append(line) - files_marker = "(./)" + return "\n".join(updated_lines) - def traverse_tree(tree, base_path=""): - paths = [] - if tree: - for key, value in tree.items(): - if key == files_marker: - if value: - for file in value: - paths.append(os.path.join(base_path, file)) - else: - subfolder_path = os.path.join(base_path, key) - paths.extend(traverse_tree(value, subfolder_path)) - return paths + commented_yaml = comment_marked_files(content) - # tree = yaml.safe_load(yaml_content) - return traverse_tree(tree) + return commented_yaml + + +class FileSelector: + """ + Manages the active files in a project directory and creates a YAML file listing them. 
+ """ + + def __init__(self, project_path: str, repository: Repository): + self.ai = AI("gpt-4o", temperature=0) + self.repository = repository + self.yaml_path = os.path.join(project_path, ".feature", "files.yml") + self._initialize() def _write_yaml_with_header(self, yaml_content): + def add_indentation(content): + lines = content.split("\n") + new_lines = [] + last_key = None + + for line in lines: + stripped_line = line.strip() + if stripped_line.endswith(":"): + last_key = stripped_line + if stripped_line.startswith("- ") and (last_key != "(./):"): + new_lines.append(" " + line) # Add extra indentation + else: + new_lines.append(line) + return "\n".join(new_lines) + + indented_content = add_indentation(yaml_content) with open(self.yaml_path, "w") as file: file.write( f"""# Complete list of files shared with the AI # Please comment out any files not needed as context for this change # This saves money and avoids overwhelming the AI -{yaml_content}""" +{indented_content}""" ) + def _read_yaml_with_headers(self): + with open(self.yaml_path, "r") as file: + original_content_lines = file.readlines()[3:] + + return "".join(original_content_lines) + def _initialize(self): """ Generates a YAML file from the tracked files if one doesnt exist @@ -91,75 +197,15 @@ def _initialize(self): print("YAML file is missing or empty, generating YAML...") - tree = self._paths_to_tree(self.repository.get_tracked_files()) - - self._write_yaml_with_header(yaml.dump(tree, sort_keys=False)) - - def _get_from_yaml(self) -> Tuple[List[str], List[str]]: - with open(self.yaml_path, "r") as file: - original_content_lines = file.readlines()[ - 3: - ] # Skip the 3 instruction lines - - # Create a version of the content with all lines uncommented - commented_content = "".join(original_content_lines) - uncommented_content = "".join( - line.replace("# ", "").replace("#", "") for line in original_content_lines - ) - - print(uncommented_content) - - included_files = self._tree_to_paths(yaml.safe_load(commented_content)) - all_files = self._tree_to_paths(yaml.safe_load(uncommented_content)) + tree = paths_to_tree(self.repository.get_tracked_files()) - # Determine excluded files by finding the difference - excluded_files = list(set(all_files) - set(included_files)) + self._write_yaml_with_header(yaml.dump(tree, sort_keys=False, indent=2)) - return (included_files, excluded_files) + def set_to_yaml(self, file_selection): - def _set_to_yaml(self, selected_files, excluded_files): - # Dont worry about commenting lines if they are no excluded files - if not excluded_files: - tree = self._paths_to_tree(selected_files) + commented_yaml = file_selection_to_commented_yaml(file_selection) - self._write_yaml_with_header(yaml.dump(tree, sort_keys=False)) - - return - - all_files = list(selected_files) + list(excluded_files) - - current_tree = self._paths_to_tree(all_files) - - # Add a # in front of files which are excluded. This is a marker for us to go back and properly comment them out - def mark_excluded_files(structure, prefix=""): - for i, item in enumerate(structure): - if isinstance(item, dict): - for key, value in item.items(): - mark_excluded_files(value, prefix + key) - else: - full_path = prefix + item - if full_path in excluded_files: - structure[i] = f"#{item}" - - mark_excluded_files(current_tree) - - # Find all files marked for commenting - add comment and remove the mark. 
- def comment_marked_files(yaml_content): - lines = yaml_content.split("\n") - - updated_lines = [] - for line in lines: - if "#" in line: - line = "#" + line.replace("#", "") - updated_lines.append(line) - - return "\n".join(updated_lines) - - content = yaml.dump(tree, sort_keys=False) - - updated_content = comment_marked_files(content) - - self._write_yaml_with_header(updated_content) + self._write_yaml_with_header(commented_yaml) return @@ -170,28 +216,42 @@ def update_yaml_from_tracked_files(self): tracked_files = self.repository.get_tracked_files() - selected_files, excluded_files = self._get_from_yaml() + file_selection = self.get_from_yaml() - print(excluded_files) + print(file_selection.excluded_files) # If there are no changes, do nothing - if set(tracked_files) == set(selected_files + excluded_files): + if set(tracked_files) == set( + file_selection.included_files + file_selection.excluded_files + ): return - new_selected_files = list(set(tracked_files) - set(excluded_files)) + new_included_files = list( + set(tracked_files) - set(file_selection.excluded_files) + ) - self._set_to_yaml(new_selected_files, excluded_files) + self._set_to_yaml(new_included_files, file_selection.excluded_files) - def get_from_yaml(self): + def get_from_yaml(self) -> FileSelection: """ - Get selected file paths from yaml + Get selected file paths and excluded file paths from yaml """ - selected_files, excluded_files = self._get_from_yaml() + yaml_content = self._read_yaml_with_headers() + + try: + file_selection = commented_yaml_to_file_selection(yaml_content) + except: + print( + "Could not read the file selection from the YAML file. Attempting to fix with AI" + ) + print(yaml_content) + file_selection = fuzzy_parse_file_selection(self.ai, yaml_content) + self.set_to_yaml(file_selection) - return selected_files + return file_selection - def get_pretty_from_yaml(self): + def get_pretty_selected_from_yaml(self) -> str: """ Retrieves selected file paths from the YAML file and prints them in an ASCII-style tree structure. """ diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index 29c43117c8..d5c27bff96 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -1,12 +1,14 @@ import xml.etree.ElementTree as ET +import json -from feature import Feature -from files import Files -from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler -from repository import GitContext - +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.files import Files +from gpt_engineer.applications.interactive_cli.domain import FileSelection +from gpt_engineer.applications.interactive_cli.repository import GitContext from gpt_engineer.core.ai import AI +from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler + def generate_branch_name(ai: AI, feature_description: str) -> str: system_prompt = """ @@ -130,42 +132,105 @@ def generate_suggested_tasks( return parse_task_xml_to_class(xml).tasks -def fuzzy_parse_yaml_files(ai: AI, yaml_string: str) -> str: - system_prompt = """ -You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. 
- -A user has been given a yaml representation of a file structure, represented in block collections like so: - -- folder1/: - - folder2/: - - file3 - - file4 - - file1 - - file2 -- file5 -- file6 - -They have been asked to comment out any files that they wish to be excluded. - -An example of the yaml file after commenting might be something like this: +def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: + system_prompt = """## Explanation +You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. + +A user has been given a yaml representation of a file structure, represented like so: + +``` +.github: + ISSUE_TEMPLATE: + - bug-report.md + - documentation-clarification.md + - feature-request.md + PULL_REQUEST_TEMPLATE: + - PULL_REQUEST_TEMPLATE.md + workflows: + - automation.yml + - ci.yaml + - pre-commit.yaml + - release.yaml + (./): + - CODEOWNERS + - CODE_OF_CONDUCT.md + - CONTRIBUTING.md + - FUNDING.yml +``` + +Folders are represented as keys in a dictionary, files are items in a list. Any files listed under the (./) key can be assumed to be files of the folder above that. + +The given example maps to these file paths: + +``` +".github/ISSUE_TEMPLATE/bug-report.md", +".github/ISSUE_TEMPLATE/documentation-clarification.md", +".github/ISSUE_TEMPLATE/feature-request.md", +".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", +".github/workflows/automation.yml", +".github/workflows/ci.yaml", +".github/workflows/pre-commit.yaml", +".github/workflows/release.yaml", +".github/CODEOWNERS", +".github/CODE_OF_CONDUCT.md", +".github/CONTRIBUTING.md", +".github/FUNDING.yml", +``` + +An example of the yaml file after commenting might be something like this: + +``` +.github: + # ISSUE_TEMPLATE: + # - bug-report.md + # - documentation-clarification.md + # - feature-request.md + # PULL_REQUEST_TEMPLATE: + # - PULL_REQUEST_TEMPLATE.md + workflows: + - automation.yml + - ci.yaml + - pre-commit.yaml + - release.yaml + # (./): + # - CODEOWNERS + - CODE_OF_CONDUCT.md + - CONTRIBUTING.md + # - FUNDING.yml +``` + +This would convert into: + +``` +{ + "included_files": [ + ".github/workflows/automation.yml", + ".github/workflows/ci.yaml", + ".github/workflows/pre-commit.yaml", + ".github/workflows/release.yaml", + ".github/CODE_OF_CONDUCT.md", + ".github/CONTRIBUTING.md" + ], + "excluded_files": [ + ".github/ISSUE_TEMPLATE/bug-report.md", + ".github/ISSUE_TEMPLATE/documentation-clarification.md", + ".github/ISSUE_TEMPLATE/feature-request.md", + ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", + ".github/CODEOWNERS", + ".github/FUNDING.yml" + ] +} +``` -- folder1/: - - folder2/: - # - file1 - # - file2 - - folder3/: - - file3 - # - file4 - - file5 -# - file6 -- file7 +Although the commmented content wasnt strictly correct yaml, their intentions were clear. They wanted to retain the files in the workflow folder aswell as the code of conduct and contributing guides -Although this isnt strictly correct yaml, their intentions are clear. +Based on commented yaml inputs such as this, your job is to output JSON, indicating which files have been included and which have been excluded. -Your job is to return the list of included files, and the list of excluded files as json. +Excluded files are always commented out with a # like in the above example. 
-The json you should return will be like this: +The json you should return will be like this: +``` { "included_files": [ "folder1/file5", @@ -179,16 +244,232 @@ def fuzzy_parse_yaml_files(ai: AI, yaml_string: str) -> str: "folder1/file5", ] } +``` Files can only be included or excluded, not both. If you are confused about the state of a file make your best guess - and if you really arent sure then mark it as included. + +Respond in JSON and nothing else. + +## Examples + +Example 1: + +Input: + +.github: + ISSUE_TEMPLATE: + - bug_report.md + - feature_request.md + PULL_REQUEST_TEMPLATE: + - pull_request_template.md + # workflows: + # - ci.yml + # - release.yml + +Output: + +{ + "included_files": [ + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/PULL_REQUEST_TEMPLATE/pull_request_template.md" + ], + "excluded_files": [ + ".github/workflows/ci.yml", + ".github/workflows/release.yml" + ] +} + +Example 2: + +Input: + +source: + # controllers: + # - MainController.cs + # - AuthController.cs + models: + - User.cs + - Post.cs + views: + Home: + - Index.cshtml + # - About.cshtml + Auth: + - Login.cshtml + - Register.cshtml + (./): + - Dockerfile + +Output: + +{ + "included_files": [ + "source/models/User.cs", + "source/models/Post.cs", + "source/views/Home/Index.cshtml", + "source/views/Auth/Login.cshtml", + "source/views/Auth/Register.cshtml" + "source/Dockerfile", + ], + "excluded_files": [ + "source/controllers/MainController.cs", + "source/controllers/AuthController.cs", + "source/views/Home/About.cshtml" + ] +} + +Example 3: + +Input: + +src: + main: + java: + com: + example: + # controllers: + # - UserController.java + # - PostController.java + models: + - User.java + - Post.java + # repositories: + # - UserRepository.java + # - PostRepository.java + services: + - UserService.java + - PostService.java + resources: + - application.properties + test: + java: + com: + example: + controllers: + - UserControllerTest.java + - PostControllerTest.java + (./): + - pom.xml + - Dockerfile + +Output: + +{ + "included_files": [ + "src/main/java/com/example/models/User.java", + "src/main/java/com/example/models/Post.java", + "src/main/java/com/example/services/UserService.java", + "src/main/java/com/example/services/PostService.java", + "src/main/resources/application.properties", + "src/test/java/com/example/controllers/UserControllerTest.java", + "src/test/java/com/example/controllers/PostControllerTest.java", + "pom.xml", + "Dockerfile" + ], + "excluded_files": [ + "src/main/java/com/example/controllers/UserController.java", + "src/main/java/com/example/controllers/PostController.java", + "src/main/java/com/example/repositories/UserRepository.java", + "src/main/java/com/example/repositories/PostRepository.java" + ] +} + +Example 4: + +Input: + + +app: + # controllers: + # - application_controller.rb + # - users_controller.rb + models: + - user.rb + - post.rb + views: + layouts: + - application.html.erb + users: + - index.html.erb + - show.html.erb + posts: + - index.html.erb + # - show.html.erb + (./): + - Gemfile + - config +config: + environments: + - development.rb + - test.rb + # - production.rb + initializers: + - application_controller_renderer.rb + locales: + - en.yml + # routes.rb +db: + migrate: + - 20211025120523_create_users.rb + - 20211025120530_create_posts.rb +test: + fixtures: + - users.yml + - posts.yml + # controllers: + # - users_controller_test.rb + # - posts_controller_test.rb + models: + - user_test.rb + - post_test.rb + + 
+Output: + +{ + "included_files": [ + "app/models/user.rb", + "app/models/post.rb", + "app/views/layouts/application.html.erb", + "app/views/users/index.html.erb", + "app/views/users/show.html.erb", + "app/views/posts/index.html.erb", + "app/Gemfile", + "config/environments/development.rb", + "config/environments/test.rb", + "config/initializers/application_controller_renderer.rb", + "config/locales/en.yml", + "db/migrate/20211025120523_create_users.rb", + "db/migrate/20211025120530_create_posts.rb", + "test/fixtures/users.yml", + "test/fixtures/posts.yml", + "test/models/user_test.rb", + "test/models/post_test.rb" + ], + "excluded_files": [ + "app/controllers/application_controller.rb", + "app/controllers/users_controller.rb", + "app/views/posts/show.html.erb", + "config/environments/production.rb", + "config/routes.rb", + "test/controllers/users_controller_test.rb", + "test/controllers/posts_controller_test.rb" + ] +} + +## IMPORTANT +Remember any line that is commented is an excluded file. Any line that is NOT commented - is an included file. """ - # ai.llm.callbacks.clear() # silent + # ai.llm.callbacks.clear() # silent messages = ai.start(system_prompt, yaml_string, step_name="fuzzy-parse-yaml") # ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) - xml = messages[-1].content.strip() + json_string = messages[-1].content.strip() - return parse_task_xml_to_class(xml).tasks + data = json.loads(json_string) + + return FileSelection(data["included_files"], data["excluded_files"]) diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index b6835b7c16..44baa4fe33 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -1,10 +1,10 @@ import typer - -from agent import FeatureAgent from dotenv import load_dotenv -from feature import Feature -from repository import Repository -from settings import Settings + +from gpt_engineer.applications.interactive_cli.agent import FeatureAgent +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.domain import Settings from gpt_engineer.core.ai import AI diff --git a/gpt_engineer/applications/interactive_cli/settings.py b/gpt_engineer/applications/interactive_cli/settings.py deleted file mode 100644 index fa12988c65..0000000000 --- a/gpt_engineer/applications/interactive_cli/settings.py +++ /dev/null @@ -1,3 +0,0 @@ -class Settings: - def __init__(self, no_branch: bool = False): - self.no_branch = no_branch diff --git a/tests/applications/interactive_cli/test_file_selection.py b/tests/applications/interactive_cli/test_file_selection.py index 6a069e58cf..a9777c64cf 100644 --- a/tests/applications/interactive_cli/test_file_selection.py +++ b/tests/applications/interactive_cli/test_file_selection.py @@ -1,118 +1,203 @@ -import os -import shutil -import tempfile -import unittest - -from gpt_engineer.applications.interactive_cli.file_selection import FileSelection - - -class MockRepository: - def __init__(self, files): - self.files = files - - def get_tracked_files(self): - return self.files - - -class TestFileSelection(unittest.TestCase): - - def setUp(self): - # Create a temporary directory for the test - self.test_dir = tempfile.mkdtemp() - self.project_path = self.test_dir - os.makedirs(os.path.join(self.project_path, ".feature"), exist_ok=True) - - # Initial file structure for the mock 
repository - self.initial_files = [ - "folder1/file1", - "folder1/file2", - "folder1/folder2/file3", - "folder1/folder2/file4", - "file5", - "file6", - ] - self.repository = MockRepository(self.initial_files) - - # Initialize the FileSelection object - self.file_selection = FileSelection(self.project_path, self.repository) - - def tearDown(self): - # Remove the temporary directory after the test - shutil.rmtree(self.test_dir) - - def test_lifecycle(self): - # Step 1: Create YAML file from the mock repository - self.file_selection._initialize() - expected_yaml_initial = """# Complete list of files shared with the AI -# Please comment out any files not needed as context for this change -# This saves money and avoids overwhelming the AI -- folder1/: - - folder2/: - - file3 - - file4 - - file1 - - file2 -- file5 -- file6 +import yaml +import pytest +from dotenv import load_dotenv + +from gpt_engineer.core.ai import AI + +from gpt_engineer.applications.interactive_cli.file_selection import ( + FileSelection, + paths_to_tree, + tree_to_paths, + paths_to_tree, + file_selection_to_commented_yaml, + commented_yaml_to_file_selection, +) + +from gpt_engineer.applications.interactive_cli.generation_tools import ( + fuzzy_parse_file_selection, +) + + +def test_tree_conversion(): + original_paths = [ + ".github/ISSUE_TEMPLATE/bug-report.md", + ".github/ISSUE_TEMPLATE/documentation-clarification.md", + ".github/ISSUE_TEMPLATE/feature-request.md", + ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", + ".github/workflows/automation.yml", + ".github/workflows/ci.yaml", + ".github/workflows/pre-commit.yaml", + ".github/workflows/release.yaml", + ".github/CODEOWNERS", + ".github/CODE_OF_CONDUCT.md", + ".github/CONTRIBUTING.md", + ".github/FUNDING.yml", + "docker/Dockerfile", + "docker/README.md", + "docker/entrypoint.sh", + "docs/examples/open_llms/README.md", + "docs/examples/open_llms/langchain_interface.py", + ] + + tree = paths_to_tree(original_paths) + reconstructed_paths = tree_to_paths(tree) + + assert sorted(original_paths) == sorted( + reconstructed_paths + ), "The file paths do not match after conversion!" + + +def test_tree_conversion_yaml(): + original_paths = [ + ".github/ISSUE_TEMPLATE/bug-report.md", + ".github/ISSUE_TEMPLATE/documentation-clarification.md", + ".github/ISSUE_TEMPLATE/feature-request.md", + ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", + ".github/workflows/automation.yml", + ".github/workflows/ci.yaml", + ".github/workflows/pre-commit.yaml", + ".github/workflows/release.yaml", + ".github/CODEOWNERS", + ".github/CODE_OF_CONDUCT.md", + ".github/CONTRIBUTING.md", + ".github/FUNDING.yml", + "docker/Dockerfile", + "docker/README.md", + "docker/entrypoint.sh", + "docs/examples/open_llms/README.md", + "docs/examples/open_llms/langchain_interface.py", + ] + + tree = paths_to_tree(original_paths) + yaml_tree = yaml.dump(tree) + read_tree = yaml.safe_load(yaml_tree) + reconstructed_paths = tree_to_paths(read_tree) + + assert sorted(original_paths) == sorted( + reconstructed_paths + ), "The file paths do not match after conversion!" 
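# A minimal, self-contained sketch of the round-trip exercised by the tests above,
# assuming the convention described in the file-selection prompt: folders become
# dict keys, files become list items, and direct files of a folder that also has
# sub-folders sit under the special "(./)" key. The real paths_to_tree /
# tree_to_paths helpers in file_selection.py may be implemented differently;
# the _sketch_* names below are illustrative only.
def _sketch_paths_to_tree(paths):
    root = {}
    for path in paths:
        parts = path.split("/")
        node = root
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node.setdefault("(./)", []).append(parts[-1])

    def collapse(node):
        # A folder holding only direct files collapses to a plain list of names.
        if set(node) == {"(./)"}:
            return node["(./)"]
        return {
            key: (value if key == "(./)" else collapse(value))
            for key, value in node.items()
        }

    return collapse(root)


def _sketch_tree_to_paths(tree, prefix=""):
    # Inverse of the above: flatten the nested mapping back into file paths.
    if isinstance(tree, list):
        return [prefix + name for name in tree]
    paths = []
    for key, value in tree.items():
        if key == "(./)":
            paths.extend(prefix + name for name in value)
        else:
            paths.extend(_sketch_tree_to_paths(value, prefix + key + "/"))
    return paths


# Round-trip check under the stated assumptions:
_example = ["docs/a.md", "docs/img/logo.png", "README.md"]
assert sorted(_sketch_tree_to_paths(_sketch_paths_to_tree(_example))) == sorted(_example)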
+ + +def test_file_selection_to_yaml(): + included_files = [ + "docker/Dockerfile", + "docker/README.md", + "docker/entrypoint.sh", + ] + + excluded_files = [ + ".github/ISSUE_TEMPLATE/bug-report.md", + ".github/ISSUE_TEMPLATE/documentation-clarification.md", + ".github/ISSUE_TEMPLATE/feature-request.md", + ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", + ".github/workflows/automation.yml", + ".github/workflows/ci.yaml", + ".github/workflows/pre-commit.yaml", + ".github/workflows/release.yaml", + ".github/CODEOWNERS", + ".github/CODE_OF_CONDUCT.md", + ".github/CONTRIBUTING.md", + ".github/FUNDING.yml", + "docs/examples/open_llms/README.md", + "docs/examples/open_llms/langchain_interface.py", + ] + + commented_yaml = file_selection_to_commented_yaml( + FileSelection(included_files, excluded_files) + ) + + assert ( + commented_yaml + == """.github: + ISSUE_TEMPLATE: +# - bug-report.md +# - documentation-clarification.md +# - feature-request.md + PULL_REQUEST_TEMPLATE: +# - PULL_REQUEST_TEMPLATE.md + workflows: +# - automation.yml +# - ci.yaml +# - pre-commit.yaml +# - release.yaml + (./): +# - CODEOWNERS +# - CODE_OF_CONDUCT.md +# - CONTRIBUTING.md +# - FUNDING.yml +docker: +- Dockerfile +- README.md +- entrypoint.sh +docs: + examples: + open_llms: +# - README.md +# - langchain_interface.py """ - with open(self.file_selection.yaml_path, "r") as file: - initial_yaml_content = file.read() - - self.assertEqual(initial_yaml_content, expected_yaml_initial) - - # Step 2: Update the YAML file directly (simulating user comments) - edited_yaml_content = """# Complete list of files shared with the AI -# Please comment out any files not needed as context for this change -# This saves money and avoids overwhelming the AI -- folder1/: - - folder2/: - # - file3 - # - file4 - # - file1 - - file2 -# - file5 -- file6 -""" - with open(self.file_selection.yaml_path, "w") as file: - file.write(edited_yaml_content) - - # Step 3: Update tracked files in the repository and update the YAML file - new_files = [ - "folder1/file1", - "folder1/file2", - "folder1/folder2/file3", - "folder1/folder2/file4", - "file5", - "file6", - "newfile7", - ] - self.repository.files = new_files - self.file_selection.update_yaml_from_tracked_files() - - expected_yaml_updated = """# Complete list of files shared with the AI -# Please comment out any files not needed as context for this change -# This saves money and avoids overwhelming the AI -- folder1/: - - folder2/: - # - file3 - # - file4 - # - file1 - - file2 -# - file5 -- file6 -- newfile7 -""" - with open(self.file_selection.yaml_path, "r") as file: - updated_yaml_content = file.read() - - self.assertEqual(updated_yaml_content, expected_yaml_updated) - - # Step 4: Get files from YAML and verify - selected_files = self.file_selection.get_from_yaml() - expected_selected_files = ["folder1/file2", "file6", "newfile7"] - - self.assertEqual(selected_files, expected_selected_files) - - -if __name__ == "__main__": - unittest.main() + ) + + +def test_yaml_to_file_selection(): + included_files = [ + "docker/Dockerfile", + "docker/README.md", + "docker/entrypoint.sh", + ] + + excluded_files = [ + ".github/ISSUE_TEMPLATE/bug-report.md", + ".github/ISSUE_TEMPLATE/documentation-clarification.md", + ".github/ISSUE_TEMPLATE/feature-request.md", + ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", + ".github/workflows/automation.yml", + ".github/workflows/ci.yaml", + ".github/workflows/pre-commit.yaml", + ".github/workflows/release.yaml", + ".github/CODEOWNERS", + 
".github/CODE_OF_CONDUCT.md", + ".github/CONTRIBUTING.md", + ".github/FUNDING.yml", + "docs/examples/open_llms/README.md", + "docs/examples/open_llms/langchain_interface.py", + ] + + commented_yaml = file_selection_to_commented_yaml( + FileSelection(included_files, excluded_files) + ) + + file_selection = commented_yaml_to_file_selection(commented_yaml) + + assert sorted(file_selection.included_files) == sorted(included_files) + assert sorted(file_selection.excluded_files) == sorted(excluded_files) + + +@pytest.mark.skip(reason="Skipping as test requires AI") +def test_yaml_to_file_selection_fuzzy(): + + load_dotenv() + + commented_yaml = """# gpt_engineer: +# applications: +# cli: + - __init__.py + - cli_agent.py +# - collect.py + - file_selector.py + - learning.py + - main.py""" + + file_selction = fuzzy_parse_file_selection(AI(), commented_yaml) + + assert file_selction == FileSelection( + [ + "gpt_engineer/applications/cli/__init__.py", + "gpt_engineer/applications/cli/cli_agent.py", + "gpt_engineer/applications/cli/file_selector.py", + "gpt_engineer/applications/cli/learning.py", + "gpt_engineer/applications/cli/main.py", + ], + [ + "gpt_engineer/applications/cli/collect.py", + ], + ) From 646a5bd69b531e47fab3fab7750d8b8d0c72aa10 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 21 May 2024 21:04:17 +0100 Subject: [PATCH 24/36] working to the end of first task --- .../interactive_cli/agent_steps.py | 4 +- .../interactive_cli/file_selection.py | 48 +++++++++---------- .../interactive_cli/generation_tools.py | 16 +++---- gpt_engineer/core/ai.py | 2 +- gpt_engineer/core/token_usage.py | 10 ++-- 5 files changed, 37 insertions(+), 43 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index 34e5350a97..431778509b 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -136,7 +136,9 @@ def run_task_loop( prompt = Prompt(feature.get_task(), prefix="Task: ") - files = Files(project_path, file_selector.get_from_yaml()) + selected_files = file_selector.get_from_yaml().included_files + + files = Files(project_path, selected_files) improve_lambda = lambda: improve_fn( ai, prompt, files, memory, preprompts_holder, context_string diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index 1b7e18863b..9f983b893e 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -135,7 +135,7 @@ def comment_marked_files(yaml_content): updated_lines = [] for line in lines: if "#" in line: - line = "#" + line.replace("#", "").replace("'", "") + line = line.replace("- '#", "#- ").replace("'", "") updated_lines.append(line) return "\n".join(updated_lines) @@ -154,19 +154,30 @@ def __init__(self, project_path: str, repository: Repository): self.ai = AI("gpt-4o", temperature=0) self.repository = repository self.yaml_path = os.path.join(project_path, ".feature", "files.yml") - self._initialize() + + if os.path.exists(self.yaml_path): + return + + print("YAML file is missing or empty, generating YAML...") + + file_selection = FileSelection([], self.repository.get_tracked_files()) + + self.set_to_yaml(file_selection) def _write_yaml_with_header(self, yaml_content): + def add_indentation(content): lines = content.split("\n") new_lines = [] last_key = None for line in lines: - 
stripped_line = line.strip() + stripped_line = line.replace("#", "").strip() if stripped_line.endswith(":"): last_key = stripped_line if stripped_line.startswith("- ") and (last_key != "(./):"): + # add 2 spaces at the begining of line or after any # + new_lines.append(" " + line) # Add extra indentation else: new_lines.append(line) @@ -175,9 +186,9 @@ def add_indentation(content): indented_content = add_indentation(yaml_content) with open(self.yaml_path, "w") as file: file.write( - f"""# Complete list of files shared with the AI -# Please comment out any files not needed as context for this change -# This saves money and avoids overwhelming the AI + f"""# Uncomment any files you would like to use for this feature +# Note that (./) is a special key which represents files at the root of the parent directory + {indented_content}""" ) @@ -187,20 +198,6 @@ def _read_yaml_with_headers(self): return "".join(original_content_lines) - def _initialize(self): - """ - Generates a YAML file from the tracked files if one doesnt exist - """ - - if os.path.exists(self.yaml_path): - return - - print("YAML file is missing or empty, generating YAML...") - - tree = paths_to_tree(self.repository.get_tracked_files()) - - self._write_yaml_with_header(yaml.dump(tree, sort_keys=False, indent=2)) - def set_to_yaml(self, file_selection): commented_yaml = file_selection_to_commented_yaml(file_selection) @@ -218,8 +215,6 @@ def update_yaml_from_tracked_files(self): file_selection = self.get_from_yaml() - print(file_selection.excluded_files) - # If there are no changes, do nothing if set(tracked_files) == set( file_selection.included_files + file_selection.excluded_files @@ -230,7 +225,9 @@ def update_yaml_from_tracked_files(self): set(tracked_files) - set(file_selection.excluded_files) ) - self._set_to_yaml(new_included_files, file_selection.excluded_files) + self.set_to_yaml( + FileSelection(new_included_files, file_selection.excluded_files) + ) def get_from_yaml(self) -> FileSelection: """ @@ -256,7 +253,7 @@ def get_pretty_selected_from_yaml(self) -> str: Retrieves selected file paths from the YAML file and prints them in an ASCII-style tree structure. """ # Get selected files from YAML - selected_files = self.get_from_yaml() + file_selection = self.get_from_yaml() # Helper function to insert a path into the tree dictionary def insert_path(tree, path_parts): @@ -267,9 +264,8 @@ def insert_path(tree, path_parts): tree[path_parts[0]] = {} insert_path(tree[path_parts[0]], path_parts[1:]) - # Create a nested dictionary from the list of file paths file_tree = {} - for filepath in selected_files: + for filepath in file_selection.included_files: parts = filepath.split("/") insert_path(file_tree, parts) diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index d5c27bff96..a498af3d1d 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -138,7 +138,6 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: A user has been given a yaml representation of a file structure, represented like so: -``` .github: ISSUE_TEMPLATE: - bug-report.md @@ -156,13 +155,11 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: - CODE_OF_CONDUCT.md - CONTRIBUTING.md - FUNDING.yml -``` Folders are represented as keys in a dictionary, files are items in a list. 
Any files listed under the (./) key can be assumed to be files of the folder above that. The given example maps to these file paths: -``` ".github/ISSUE_TEMPLATE/bug-report.md", ".github/ISSUE_TEMPLATE/documentation-clarification.md", ".github/ISSUE_TEMPLATE/feature-request.md", @@ -175,11 +172,10 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: ".github/CODE_OF_CONDUCT.md", ".github/CONTRIBUTING.md", ".github/FUNDING.yml", -``` An example of the yaml file after commenting might be something like this: -``` + .github: # ISSUE_TEMPLATE: # - bug-report.md @@ -197,11 +193,10 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: - CODE_OF_CONDUCT.md - CONTRIBUTING.md # - FUNDING.yml -``` + This would convert into: -``` { "included_files": [ ".github/workflows/automation.yml", @@ -220,7 +215,7 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: ".github/FUNDING.yml" ] } -``` + Although the commmented content wasnt strictly correct yaml, their intentions were clear. They wanted to retain the files in the workflow folder aswell as the code of conduct and contributing guides @@ -230,7 +225,6 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: The json you should return will be like this: -``` { "included_files": [ "folder1/file5", @@ -244,7 +238,6 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: "folder1/file5", ] } -``` Files can only be included or excluded, not both. If you are confused about the state of a file make your best guess - and if you really arent sure then mark it as included. @@ -470,6 +463,9 @@ def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: json_string = messages[-1].content.strip() + # strip anything before first { and after last } + json_string = json_string[json_string.find("{") : json_string.rfind("}") + 1] + data = json.loads(json_string) return FileSelection(data["included_files"], data["excluded_files"]) diff --git a/gpt_engineer/core/ai.py b/gpt_engineer/core/ai.py index 5db26eb439..8672fe8a8b 100644 --- a/gpt_engineer/core/ai.py +++ b/gpt_engineer/core/ai.py @@ -87,7 +87,7 @@ class AI: def __init__( self, - model_name="gpt-4-turbo", + model_name="gpt-4o", temperature=0.1, azure_endpoint=None, streaming=True, diff --git a/gpt_engineer/core/token_usage.py b/gpt_engineer/core/token_usage.py index b10fec9033..b51b937e31 100644 --- a/gpt_engineer/core/token_usage.py +++ b/gpt_engineer/core/token_usage.py @@ -71,11 +71,11 @@ class Tokenizer: def __init__(self, model_name): self.model_name = model_name - self._tiktoken_tokenizer = ( - tiktoken.encoding_for_model(model_name) - if "gpt-4" in model_name or "gpt-3.5" in model_name - else tiktoken.get_encoding("cl100k_base") - ) + + try: + self._tiktoken_tokenizer = tiktoken.encoding_for_model(model_name) + except: + self._tiktoken_tokenizer = tiktoken.get_encoding("cl100k_base") def num_tokens(self, txt: str) -> int: """ From d9e2a40537a844126c8bf022ed02bc06810d1717 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 21 May 2024 22:14:49 +0100 Subject: [PATCH 25/36] basically working --- .../applications/interactive_cli/agent.py | 12 +-- .../interactive_cli/agent_steps.py | 87 ++++++++++--------- .../interactive_cli/repository.py | 11 +++ 3 files changed, 59 insertions(+), 51 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agent.py index e20667ad83..3d4e030ee9 100644 --- 
a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agent.py @@ -6,9 +6,8 @@ initialize_new_feature, update_user_file_selection, check_for_unstaged_changes, - confirm_feature_context_and_task_with_user, run_task_loop, - adjust_feature_task_or_files, + run_adjust_loop, update_task_description, ) @@ -49,14 +48,7 @@ def init(self, settings: Settings): def resume(self, settings: Settings): - implement = False - - while not implement: - implement = confirm_feature_context_and_task_with_user( - self.feature, self.file_selector - ) - - adjust_feature_task_or_files() + run_adjust_loop(self.feature, self.file_selector) check_for_unstaged_changes(self.repository) diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agent_steps.py index 431778509b..8c1d03e12a 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agent_steps.py @@ -67,11 +67,11 @@ def update_task_description(feature: Feature): def check_for_unstaged_changes( repository: Repository, ): - git_context = repository.get_git_context() + unstaged_changes = repository.get_unstaged_changes() - if git_context.unstaged_changes: + if unstaged_changes: if input( - "Unstaged changes present are you sure you want to proceed? y/n" + "Unstaged changes present are you sure you want to proceed? y/n: " ).lower() not in ["", "y", "yes"]: print("Ok, not proceeding.") return @@ -101,24 +101,14 @@ def confirm_feature_context_and_task_with_user( return False -def complete_task(): - pass - - # feature.complete task - # then - - +# todo : create a function which uses the test4.py example code approach to offer a selection of options to the user +# f - "edit feature" using update_feature_description step +# s - "edit file selection" using update_user_file_selection step +# t - "edit task" using update_task_description step +# c - complete the task and start a new one +# x - exit def adjust_feature_task_or_files(): - pass - # todo : create a function which uses the test4.py example code approach to offer a selection of options to the user - - # c - complete the task and start a new one - - # f - "edit feature" using update_feature_description step - # s - "edit file selection" using update_user_file_selection step - # t - "edit task" using update_task_description step - - # + input("Please edit the prompt files and then press Enter to continue...") def run_task_loop( @@ -145,6 +135,7 @@ def run_task_loop( ) print("\n---- begining code generation ----\n") + # Creates loop updated_files_dictionary = handle_improve_mode(improve_lambda, memory) print("\n---- ending code generation ----\n") @@ -153,6 +144,35 @@ def run_task_loop( review_changes(project_path, feature, repository, ai, file_selector) +def run_adjust_loop(feature, file_selector): + implement = confirm_feature_context_and_task_with_user(feature, file_selector) + + while not implement: + adjust_feature_task_or_files() + implement = confirm_feature_context_and_task_with_user(feature, file_selector) + + +def run_task(repository, project_path, feature, ai, file_selector): + print("Rerunning generation...") + check_for_unstaged_changes(repository) + run_task_loop(project_path, feature, repository, ai, file_selector) + + +def complete_task(repository, project_path, feature, ai, file_selector): + print("Completing task... 
") + repository.stage_all_changes() + feature.complete_task() + file_selector.update_yaml_from_tracked_files() + print("Continuing with next task...") + update_task_description(feature) + + run_adjust_loop(feature, file_selector) + + check_for_unstaged_changes(repository) + + run_task_loop(project_path, feature, repository, ai, file_selector) + + def review_changes( project_path, feature: Feature, @@ -168,32 +188,17 @@ def review_changes( result = session.prompt( """Please review the unstaged changes generated by GPT Engineer.. -r: Delete and retry the generation (incorporating changes to prompt files) +r: Retry the task (incorporating changes to prompt files) c: Complete task and stage changes -u: Undo changes and exit +x: Exit """, completer=completer, ).lower() if result == "r": - print("Deleting changes and rerunning generation...") - repository.undo_unstaged_changes() - run_task_loop(project_path, feature, repository, ai, file_selector) - + rerun_task(repository, project_path, feature, ai, file_selector) if result == "c": - print("Completing task... ") - repository.stage_all_changes() - feature.complete_task() - file_selector.update_yaml_from_tracked_files() - if cli_input("Do you want to start a new task? y/n: ").lower() in [ - "y", - "yes", - ]: - update_task_description(feature) - run_task_loop(project_path, feature, repository, ai, file_selector) - return - - if result == "u": - print("Undo the last operation.") - repository.undo_unstaged_changes() + complete_task(repository, project_path, feature, ai, file_selector) + if result == "x": + print("exiting...") return diff --git a/gpt_engineer/applications/interactive_cli/repository.py b/gpt_engineer/applications/interactive_cli/repository.py index 18b681c35f..f6d7d9c054 100644 --- a/gpt_engineer/applications/interactive_cli/repository.py +++ b/gpt_engineer/applications/interactive_cli/repository.py @@ -85,6 +85,17 @@ def get_feature_branch_diff(self): print(f"Error generating diff: {e}") return "" + def get_unstaged_changes(self): + """ + Get the unstaged changes in the repository. + + Returns + ------- + str + The unstaged changes in the repository. 
+ """ + return self.repo.git.diff() + def get_git_context(self): staged_changes = self.repo.git.diff("--cached") unstaged_changes = self.repo.git.diff() From f5e38fe488ea9e63d0238a8f595a375bf4d37e7f Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Wed, 22 May 2024 17:44:08 +0100 Subject: [PATCH 26/36] WIP --- .../interactive_cli/agents/__init__.py | 0 .../{ => agents}/agent_steps.py | 13 +- .../interactive_cli/agents/chat_agent.py | 1 + .../{agent.py => agents/feature_agent.py} | 7 +- .../interactive_cli/agents/task_agent.py | 1 + .../cli-code_samples/edit_file_tree.py | 102 ------ .../cli-code_samples/edit_yaml_paths.py | 87 ----- .../interactive_cli/cli-code_samples/test1.py | 15 - .../interactive_cli/cli-code_samples/test2.py | 30 -- .../interactive_cli/cli-code_samples/test3.py | 34 -- .../interactive_cli/cli-code_samples/test4.py | 36 -- .../applications/interactive_cli/feature.py | 17 +- .../interactive_cli/file_selection.py | 4 +- .../interactive_cli/generation_tools.py | 2 + .../applications/interactive_cli/main.py | 110 ++++-- .../interactive_cli/prompts/__init__.py | 0 .../interactive_cli/prompts/fuzzy_file_parser | 320 ++++++++++++++++++ 17 files changed, 433 insertions(+), 346 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli/agents/__init__.py rename gpt_engineer/applications/interactive_cli/{ => agents}/agent_steps.py (94%) create mode 100644 gpt_engineer/applications/interactive_cli/agents/chat_agent.py rename gpt_engineer/applications/interactive_cli/{agent.py => agents/feature_agent.py} (83%) create mode 100644 gpt_engineer/applications/interactive_cli/agents/task_agent.py delete mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py delete mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py delete mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py delete mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py delete mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py delete mode 100644 gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py create mode 100644 gpt_engineer/applications/interactive_cli/prompts/__init__.py create mode 100644 gpt_engineer/applications/interactive_cli/prompts/fuzzy_file_parser diff --git a/gpt_engineer/applications/interactive_cli/agents/__init__.py b/gpt_engineer/applications/interactive_cli/agents/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gpt_engineer/applications/interactive_cli/agent_steps.py b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py similarity index 94% rename from gpt_engineer/applications/interactive_cli/agent_steps.py rename to gpt_engineer/applications/interactive_cli/agents/agent_steps.py index 8c1d03e12a..b5839230e8 100644 --- a/gpt_engineer/applications/interactive_cli/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py @@ -37,11 +37,13 @@ def initialize_new_feature( update_feature_description(feature) - branch_name = generate_branch_name(ai, feature.get_description()) + if not no_branch: + print("Creating feature branch... 
(this can be disabled with -nb setting)") - branch_name = cli_input("\nConfirm branch name: ", default=branch_name) + branch_name = generate_branch_name(ai, feature.get_description()) + + branch_name = cli_input("\nConfirm branch name: ", default=branch_name) - if not no_branch: repository.create_branch(branch_name) print("\nFeature branch created.\n") @@ -178,7 +180,6 @@ def review_changes( feature: Feature, repository: Repository, ai: AI, - file_selector: FileSelector, ): completer = WordCompleter(["r", "c", "u"], ignore_case=True) @@ -196,9 +197,9 @@ def review_changes( ).lower() if result == "r": - rerun_task(repository, project_path, feature, ai, file_selector) + run_task(repository, project_path, feature, ai) if result == "c": - complete_task(repository, project_path, feature, ai, file_selector) + complete_task(repository, project_path, feature, ai) if result == "x": print("exiting...") return diff --git a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py new file mode 100644 index 0000000000..d5573ba686 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py @@ -0,0 +1 @@ +# todo : write chat agent diff --git a/gpt_engineer/applications/interactive_cli/agent.py b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py similarity index 83% rename from gpt_engineer/applications/interactive_cli/agent.py rename to gpt_engineer/applications/interactive_cli/agents/feature_agent.py index 3d4e030ee9..c732da7341 100644 --- a/gpt_engineer/applications/interactive_cli/agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py @@ -1,8 +1,7 @@ from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector from gpt_engineer.applications.interactive_cli.repository import Repository from gpt_engineer.applications.interactive_cli.domain import Settings -from gpt_engineer.applications.interactive_cli.agent_steps import ( +from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( initialize_new_feature, update_user_file_selection, check_for_unstaged_changes, @@ -22,18 +21,14 @@ class FeatureAgent(BaseAgent): def __init__( self, - project_path: str, feature: Feature, repository: Repository, ai: AI = None, ): - self.project_path = project_path self.feature = feature self.repository = repository self.ai = ai or AI() - self.file_selector = FileSelector(project_path, repository) - def init(self, settings: Settings): initialize_new_feature( diff --git a/gpt_engineer/applications/interactive_cli/agents/task_agent.py b/gpt_engineer/applications/interactive_cli/agents/task_agent.py new file mode 100644 index 0000000000..78f7f1499c --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/agents/task_agent.py @@ -0,0 +1 @@ +# todo : write task agent diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py deleted file mode 100644 index 6ae78694a4..0000000000 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_file_tree.py +++ /dev/null @@ -1,102 +0,0 @@ -from prompt_toolkit import PromptSession -from prompt_toolkit.application import Application -from prompt_toolkit.key_binding import KeyBindings -from prompt_toolkit.layout import Window -from prompt_toolkit.layout.containers import HSplit -from prompt_toolkit.layout.controls import 
FormattedTextControl -from prompt_toolkit.layout.layout import Layout -from prompt_toolkit.widgets import TextArea - - -def generate_file_tree(files): - """ - Generates a file tree from a list of file paths. - """ - tree = {} - for file in files: - parts = file.split("/") - node = tree - for part in parts: - node = node.setdefault(part, {}) - return tree - - -def generate_tree_string(node, prefix=""): - """ - Recursively generates a string representation of the file tree. - """ - lines = [] - items = list(node.items()) - for i, (key, subnode) in enumerate(items): - connector = "└─" if i == len(items) - 1 else "├─" - if subnode is not None: # Check if it's a directory or a commented directory - lines.append(f"{prefix}{connector} {key}/") - if subnode: # Only append sub-tree if it's not commented out - extension = " " if i == len(items) - 1 else "│ " - lines.extend(generate_tree_string(subnode, prefix + extension)) - else: # it's a file or commented file - lines.append(f"{prefix}{connector} {key}") - return lines - - -def get_editable_tree(files): - tree = generate_file_tree(files) - tree_lines = generate_tree_string(tree) - return "\n".join(tree_lines) - - -def interactive_edit_files(files): - PromptSession() - - # Generate editable file tree - editable_tree = get_editable_tree(files) - - # Text area for file tree - text_area = TextArea( - text=editable_tree, scrollbar=True, multiline=True, wrap_lines=False - ) - - # Ensure the text area starts in insert mode - # text_area.buffer.cursor_position += len(text_area.text) - text_area.buffer.insert_mode = False - - # Instructions wrapped in a Window - instructions = Window( - content=FormattedTextControl( - text="Please comment out unneeded files to reduce context overhead.\n" - 'You can comment out lines by adding "#" at the beginning of the line.\n' - "Press Ctrl-S to save and exit." 
- ), - height=3, # Adjust height as necessary - style="class:instruction", - ) - - # Container that holds both the instructions and the text area - instruction_container = HSplit([instructions, text_area]) - - # Create a layout out of the widget above - layout = Layout(instruction_container) - - # Add key bindings for custom actions like save - bindings = KeyBindings() - - @bindings.add("c-s") - def _(event): - # Saving functionality or further processing can be implemented here - print("Saving and processing your tree...") - event.app.exit() - - app = Application(layout=layout, key_bindings=bindings, full_screen=True) - app.run() - - -# Example usage -tracked_files = [ - "project/src/main/java/com/example/MyApp.java", - "project/src/main/resources/config.properties", - "project/src/test/java/com/example/MyAppTest.java", - "project/src/test/resources/testdata.txt", - "project/lib/external-library.jar", - "project/README.md", -] -interactive_edit_files(tracked_files) diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py deleted file mode 100644 index f614f41fd6..0000000000 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/edit_yaml_paths.py +++ /dev/null @@ -1,87 +0,0 @@ -from pathlib import Path - -import yaml - -from prompt_toolkit import Application -from prompt_toolkit.key_binding import KeyBindings -from prompt_toolkit.layout import Layout -from prompt_toolkit.layout.containers import HSplit -from prompt_toolkit.widgets import Label, TextArea - - -def create_yaml_file(file_paths): - """Generates a YAML structure from a list of file paths.""" - root = {} - for file_path in file_paths: - parts = Path(file_path).parts - current = root - for part in parts: - current = current.setdefault(part, {}) - return yaml.dump(root, sort_keys=False) - - -def edit_yaml(yaml_content): - """Opens a Prompt Toolkit session to edit the YAML content.""" - kb = KeyBindings() - - # @kb.add("c-q") - # def exit_(event): - # "Press Control-Q to exit." - # event.app.exit() - - # @kb.add("c-c") - # def exit_(event): - # "Press Control-Q to exit." - # event.app.exit() - - # @kb.add("c-s") - # def save_exit(event): - # "Press Control-S to save and exit." 
- # with open("edited_yaml.yaml", "w") as f: - # f.write(text_area.text) - # print("File saved as 'edited_yaml.yaml'") - # event.app.exit() - - # @kb.add("c-t") - # def comment_uncomment(event): - # """Toggle comment on the current line with Ctrl-T.""" - # tb = text_area.buffer - # doc = tb.document - # line_text = doc.current_line_before_cursor + doc.current_line_after_cursor - # if line_text.strip().startswith("#"): - # tb.delete_before_cursor(len(line_text) - len(line_text.lstrip("#"))) - # else: - # tb.insert_text("#", move_cursor=False) - - text_area = TextArea( - text=yaml_content, - scrollbar=True, - multiline=True, - wrap_lines=False, - line_numbers=True, - ) - - # Instruction label - instructions = Label( - text="Use Ctrl-S to save and exit, Ctrl-Q to quit without saving, Ctrl-T to toggle comment.", - dont_extend_height=True, - ) - - # Combine text area and instructions label in a vertical layout - layout = Layout(HSplit([text_area, instructions])) - - app = Application(layout=layout, key_bindings=kb, full_screen=False) - app.run() - - -def main(file_paths): - """Generate a YAML file from file paths and open it for editing.""" - yaml_data = create_yaml_file(file_paths) - - edit_yaml(yaml_data) - - -# Example usage: -file_paths = ["/path/to/file1.txt", "/path/to/file2.txt", "/path/to/dir/file3.txt"] - -main(file_paths) diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py deleted file mode 100644 index 6b6e166261..0000000000 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test1.py +++ /dev/null @@ -1,15 +0,0 @@ -from prompt_toolkit import prompt - - -def main(): - branch_name_suggestion = "feat/name" - print("Great, sounds like a useful feature.") - branch_name = prompt( - "Please confirm or edit the feature branch name: ", - default=branch_name_suggestion, - ) - print(f"Creating feature branch: {branch_name}") - - -if __name__ == "__main__": - main() diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py deleted file mode 100644 index a3024585f5..0000000000 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test2.py +++ /dev/null @@ -1,30 +0,0 @@ -from prompt_toolkit.shortcuts import radiolist_dialog - - -def main(): - print("Use the arrow keys to navigate. 
Press Enter to select.") - tasks = [ - ("0", "Generate Whole Feature"), - ("1", "Task A - Create a view file for account page"), - ("2", "Task B - Make an API call to retrieve account information"), - ("3", "Enter a custom task"), - ] - - result = radiolist_dialog( - title="Suggested tasks", - text="Select the task to start with, or enter a custom task:", - values=tasks, - ).run() - - if result == "3": - from prompt_toolkit import prompt - - custom_task = prompt("Enter your custom task description: ") - print(f"You entered a custom task: {custom_task}") - else: - task_description = next((desc for key, desc in tasks if key == result), None) - print(f"You selected: {task_description}") - - -if __name__ == "__main__": - main() diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py deleted file mode 100644 index 9ef776a307..0000000000 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test3.py +++ /dev/null @@ -1,34 +0,0 @@ -from prompt_toolkit.shortcuts import radiolist_dialog - - -def main(): - print("Diff generated. Please Review, and stage the changes you want to keep.") - - # Defining the options for the user with radiolist dialog - result = radiolist_dialog( - title="Diff Review Options", - text="Please select your action:", - values=[ - ("r", "Retry"), - ("s", "Stage changes and continue"), - ("c", "Commit changes and continue"), - ("u", "Undo"), - ], - ).run() - - # Handle the user's choice - if result == "r": - print("You have chosen to retry the diff generation.") - # Add logic to retry generating the diff - elif result == "s": - print("You have chosen to stage the changes.") - # Add logic to stage changes - elif result == "c": - print("You have chosen to commit the changes.") - # Add logic to commit changes - else: - print("Operation cancelled.") - - -if __name__ == "__main__": - main() diff --git a/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py b/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py deleted file mode 100644 index 1dc23b0872..0000000000 --- a/gpt_engineer/applications/interactive_cli/cli-code_samples/test4.py +++ /dev/null @@ -1,36 +0,0 @@ -from prompt_toolkit import PromptSession -from prompt_toolkit.completion import WordCompleter - - -def main(): - print("Diff generated. 
Please Review, and stage the changes you want to keep.") - - # Define the options and create a completer with those options - completer = WordCompleter(["r", "s", "c", "u"], ignore_case=True) - session = PromptSession() - - # Using prompt to get user input - result = session.prompt( - "Please select your action \n r: Retry \n s: Stage \n c: Commit \n u: Undo \n\n", - completer=completer, - ).lower() - - # Handle the user's choice - if result == "r": - print("You have chosen to retry the diff generation.") - # Add logic to retry generating the diff - elif result == "s": - print("You have chosen to stage the changes.") - # Add logic to stage changes - elif result == "c": - print("You have chosen to commit the changes.") - # Add logic to commit changes - elif result == "u": - print("Undo the last operation.") - # Add logic to undo the last operation - else: - print("Invalid option selected, please run the program again.") - - -if __name__ == "__main__": - main() diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index 1c0344f1e9..c172ea994c 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -6,6 +6,8 @@ from typing import Union from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.repository import Repository class Feature(DiskMemory): @@ -17,12 +19,17 @@ class Feature(DiskMemory): and progress (history of incremental work completed) """ - def __init__(self, project_path: Union[str, Path]): + def __init__(self, project_path: Union[str, Path], repository: Repository): - self.feature_path = Path(project_path) / ".feature" - self.feature_filename = "feature.md" - self.progress_filename = "progress.json" - self.task_filename = "task.md" + self._feature_path = Path(project_path) / ".feature" + self._feature_filename = "feature.md" + self._progress_filename = "progress.json" + self._task_filename = "task.md" + self._files_filename = "files.yml" + + self.file_selector = FileSelector( + Path(project_path) / ".feature" / self._files_filename, repository + ) super().__init__(self.feature_path) diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index 9f983b893e..d4157b2b1b 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -150,10 +150,10 @@ class FileSelector: Manages the active files in a project directory and creates a YAML file listing them. 
""" - def __init__(self, project_path: str, repository: Repository): + def __init__(self, yaml_path: str, repository: Repository): self.ai = AI("gpt-4o", temperature=0) self.repository = repository - self.yaml_path = os.path.join(project_path, ".feature", "files.yml") + self.yaml_path = yaml_path if os.path.exists(self.yaml_path): return diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index a498af3d1d..e46e7ba516 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -133,6 +133,8 @@ def generate_suggested_tasks( def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: + # todo: load prompt from ptompts/fuzzy_file_parser + system_prompt = """## Explanation You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 44baa4fe33..bc050c5b0c 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -1,7 +1,7 @@ import typer from dotenv import load_dotenv -from gpt_engineer.applications.interactive_cli.agent import FeatureAgent +from gpt_engineer.applications.interactive_cli.agents.feature_agent import FeatureAgent from gpt_engineer.applications.interactive_cli.feature import Feature from gpt_engineer.applications.interactive_cli.repository import Repository from gpt_engineer.applications.interactive_cli.domain import Settings @@ -11,22 +11,14 @@ app = typer.Typer() -@app.command() -def main( - project_path: str = typer.Argument(".", help="path"), - model: str = typer.Argument("gpt-4-turbo", help="model id string"), - new: bool = typer.Option(False, "--new", "-n", help="Initialize new feature."), - no_branch: bool = typer.Option( - False, - "--no-branch", - "-nb", - help="Do not create a new feature branch for this work.", - ), +def common_options( + project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), + model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), temperature: float = typer.Option( 0.1, "--temperature", "-t", - help="Controls randomness: lower values for more focused, deterministic outputs", + help="Controls randomness: lower values for more focused, deterministic outputs.", ), azure_endpoint: str = typer.Option( "", @@ -41,27 +33,77 @@ def main( debug: bool = typer.Option( False, "--debug", "-d", help="Enable debug mode for debugging." ), +): + return { + "project_path": project_path, + "model": model, + "no_branch": no_branch, + "temperature": temperature, + "azure_endpoint": azure_endpoint, + "verbose": verbose, + "debug": debug, + } + + +@app.command() +def task( + new: bool = typer.Option(False, "--new", "-n", help="Initialize a new task."), + **options, ): """ - Run GPTE Interactive Improve + Handle tasks in the project. 
""" - load_dotenv() + options = common_options(**options) + + ai = AI( + model_name=options["model"], + temperature=options["temperature"], + azure_endpoint=options["azure_endpoint"], + ) + + repository = Repository(options["project_path"]) + + feature = Feature(options["project_path"]) + + agent = FeatureAgent(options["project_path"], feature, repository, ai) + + settings = Settings(options["no_branch"]) + + if new: + agent.init(settings) + else: + agent.resume(settings) - # todo: check that git repo exists. If not - ask the user to create a git repository with a suitable git ignore which will be used to reduce ai usage - # todo: check that git repo is clean. If not - ask the user to stash or commit changes. + +@app.command() +def feature( + new: bool = typer.Option(False, "--new", "-n", help="Initialize a new feature."), + no_branch: bool = typer.Option( + False, + "--no-branch", + "-nb", + help="Do not create a new feature branch for this work.", + ), + **options, +): + """ + Handle features in the project. + """ + load_dotenv() + options = common_options(**options) ai = AI( - model_name=model, - temperature=temperature, - azure_endpoint=azure_endpoint, + model_name=options["model"], + temperature=options["temperature"], + azure_endpoint=options["azure_endpoint"], ) - repository = Repository(project_path) + repository = Repository(options["project_path"]) - feature = Feature(project_path) + feature = Feature(options["project_path"], repository) - agent = FeatureAgent(project_path, feature, repository, ai) + agent = FeatureAgent(feature, repository, ai) settings = Settings(no_branch) @@ -71,5 +113,27 @@ def main( agent.resume(settings) +@app.command() +def chat(**options): + """ + Initiate a chat about the current repository. + """ + load_dotenv() + options = common_options(**options) + + ai = AI( + model_name=options["model"], + temperature=options["temperature"], + azure_endpoint=options["azure_endpoint"], + ) + + repository = Repository(options["project_path"]) + + # Add the logic for initiating a chat here + typer.echo( + f"Initiating a chat about the repo at {options['project_path']} using model {options['model']}." + ) + + if __name__ == "__main__": app() diff --git a/gpt_engineer/applications/interactive_cli/prompts/__init__.py b/gpt_engineer/applications/interactive_cli/prompts/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gpt_engineer/applications/interactive_cli/prompts/fuzzy_file_parser b/gpt_engineer/applications/interactive_cli/prompts/fuzzy_file_parser new file mode 100644 index 0000000000..75200759b1 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/prompts/fuzzy_file_parser @@ -0,0 +1,320 @@ +## Explanation +You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. + +A user has been given a yaml representation of a file structure, represented like so: + +.github: + ISSUE_TEMPLATE: + - bug-report.md + - documentation-clarification.md + - feature-request.md + PULL_REQUEST_TEMPLATE: + - PULL_REQUEST_TEMPLATE.md + workflows: + - automation.yml + - ci.yaml + - pre-commit.yaml + - release.yaml + (./): + - CODEOWNERS + - CODE_OF_CONDUCT.md + - CONTRIBUTING.md + - FUNDING.yml + +Folders are represented as keys in a dictionary, files are items in a list. Any files listed under the (./) key can be assumed to be files of the folder above that. 
+ +The given example maps to these file paths: + +".github/ISSUE_TEMPLATE/bug-report.md", +".github/ISSUE_TEMPLATE/documentation-clarification.md", +".github/ISSUE_TEMPLATE/feature-request.md", +".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", +".github/workflows/automation.yml", +".github/workflows/ci.yaml", +".github/workflows/pre-commit.yaml", +".github/workflows/release.yaml", +".github/CODEOWNERS", +".github/CODE_OF_CONDUCT.md", +".github/CONTRIBUTING.md", +".github/FUNDING.yml", + +An example of the yaml file after commenting might be something like this: + + +.github: + # ISSUE_TEMPLATE: + # - bug-report.md + # - documentation-clarification.md + # - feature-request.md + # PULL_REQUEST_TEMPLATE: + # - PULL_REQUEST_TEMPLATE.md + workflows: + - automation.yml + - ci.yaml + - pre-commit.yaml + - release.yaml + # (./): + # - CODEOWNERS + - CODE_OF_CONDUCT.md + - CONTRIBUTING.md + # - FUNDING.yml + + +This would convert into: + +{ + "included_files": [ + ".github/workflows/automation.yml", + ".github/workflows/ci.yaml", + ".github/workflows/pre-commit.yaml", + ".github/workflows/release.yaml", + ".github/CODE_OF_CONDUCT.md", + ".github/CONTRIBUTING.md" + ], + "excluded_files": [ + ".github/ISSUE_TEMPLATE/bug-report.md", + ".github/ISSUE_TEMPLATE/documentation-clarification.md", + ".github/ISSUE_TEMPLATE/feature-request.md", + ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", + ".github/CODEOWNERS", + ".github/FUNDING.yml" + ] +} + + +Although the commmented content wasnt strictly correct yaml, their intentions were clear. They wanted to retain the files in the workflow folder aswell as the code of conduct and contributing guides + +Based on commented yaml inputs such as this, your job is to output JSON, indicating which files have been included and which have been excluded. + +Excluded files are always commented out with a # like in the above example. + +The json you should return will be like this: + +{ + "included_files": [ + "folder1/file5", + "folder1/folder3/file3", + "file7" + ], + "excluded_files": [ + "folder1/folder2/file1", + "folder1/folder2/file2", + "folder1/folder3/file4", + "folder1/file5", + ] +} + +Files can only be included or excluded, not both. If you are confused about the state of a file make your best guess - and if you really arent sure then mark it as included. + +Respond in JSON and nothing else. 
+ +## Examples + +Example 1: + +Input: + +.github: + ISSUE_TEMPLATE: + - bug_report.md + - feature_request.md + PULL_REQUEST_TEMPLATE: + - pull_request_template.md + # workflows: + # - ci.yml + # - release.yml + +Output: + +{ + "included_files": [ + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/PULL_REQUEST_TEMPLATE/pull_request_template.md" + ], + "excluded_files": [ + ".github/workflows/ci.yml", + ".github/workflows/release.yml" + ] +} + +Example 2: + +Input: + +source: + # controllers: + # - MainController.cs + # - AuthController.cs + models: + - User.cs + - Post.cs + views: + Home: + - Index.cshtml + # - About.cshtml + Auth: + - Login.cshtml + - Register.cshtml + (./): + - Dockerfile + +Output: + +{ + "included_files": [ + "source/models/User.cs", + "source/models/Post.cs", + "source/views/Home/Index.cshtml", + "source/views/Auth/Login.cshtml", + "source/views/Auth/Register.cshtml" + "source/Dockerfile", + ], + "excluded_files": [ + "source/controllers/MainController.cs", + "source/controllers/AuthController.cs", + "source/views/Home/About.cshtml" + ] +} + +Example 3: + +Input: + +src: + main: + java: + com: + example: + # controllers: + # - UserController.java + # - PostController.java + models: + - User.java + - Post.java + # repositories: + # - UserRepository.java + # - PostRepository.java + services: + - UserService.java + - PostService.java + resources: + - application.properties + test: + java: + com: + example: + controllers: + - UserControllerTest.java + - PostControllerTest.java + (./): + - pom.xml + - Dockerfile + +Output: + +{ + "included_files": [ + "src/main/java/com/example/models/User.java", + "src/main/java/com/example/models/Post.java", + "src/main/java/com/example/services/UserService.java", + "src/main/java/com/example/services/PostService.java", + "src/main/resources/application.properties", + "src/test/java/com/example/controllers/UserControllerTest.java", + "src/test/java/com/example/controllers/PostControllerTest.java", + "pom.xml", + "Dockerfile" + ], + "excluded_files": [ + "src/main/java/com/example/controllers/UserController.java", + "src/main/java/com/example/controllers/PostController.java", + "src/main/java/com/example/repositories/UserRepository.java", + "src/main/java/com/example/repositories/PostRepository.java" + ] +} + +Example 4: + +Input: + + +app: + # controllers: + # - application_controller.rb + # - users_controller.rb + models: + - user.rb + - post.rb + views: + layouts: + - application.html.erb + users: + - index.html.erb + - show.html.erb + posts: + - index.html.erb + # - show.html.erb + (./): + - Gemfile + - config +config: + environments: + - development.rb + - test.rb + # - production.rb + initializers: + - application_controller_renderer.rb + locales: + - en.yml + # routes.rb +db: + migrate: + - 20211025120523_create_users.rb + - 20211025120530_create_posts.rb +test: + fixtures: + - users.yml + - posts.yml + # controllers: + # - users_controller_test.rb + # - posts_controller_test.rb + models: + - user_test.rb + - post_test.rb + + +Output: + +{ + "included_files": [ + "app/models/user.rb", + "app/models/post.rb", + "app/views/layouts/application.html.erb", + "app/views/users/index.html.erb", + "app/views/users/show.html.erb", + "app/views/posts/index.html.erb", + "app/Gemfile", + "config/environments/development.rb", + "config/environments/test.rb", + "config/initializers/application_controller_renderer.rb", + "config/locales/en.yml", + "db/migrate/20211025120523_create_users.rb", + 
"db/migrate/20211025120530_create_posts.rb", + "test/fixtures/users.yml", + "test/fixtures/posts.yml", + "test/models/user_test.rb", + "test/models/post_test.rb" + ], + "excluded_files": [ + "app/controllers/application_controller.rb", + "app/controllers/users_controller.rb", + "app/views/posts/show.html.erb", + "config/environments/production.rb", + "config/routes.rb", + "test/controllers/users_controller_test.rb", + "test/controllers/posts_controller_test.rb" + ] +} + +## IMPORTANT +Remember any line that is commented is an excluded file. Any line that is NOT commented - is an included file. \ No newline at end of file From 6525194eafe5daf54f1289d800f97d2da56ee2d1 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 23 May 2024 09:10:36 +0100 Subject: [PATCH 27/36] chat working --- .../interactive_cli/agents/agent_steps.py | 8 +- .../interactive_cli/agents/chat_agent.py | 66 ++++++++++- .../interactive_cli/agents/feature_agent.py | 10 +- .../applications/interactive_cli/feature.py | 27 +++-- .../interactive_cli/generation_tools.py | 17 +-- .../applications/interactive_cli/main.py | 106 +++++++----------- pyproject.toml | 1 + 7 files changed, 137 insertions(+), 98 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py index b5839230e8..29c369fd28 100644 --- a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py @@ -126,6 +126,12 @@ def run_task_loop( context_string = build_context_string(feature, repository.get_git_context()) + feature_agent_context = f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothing more. + +The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. 
+
+{context_string}"""
+
     prompt = Prompt(feature.get_task(), prefix="Task: ")
 
     selected_files = file_selector.get_from_yaml().included_files
@@ -133,7 +139,7 @@ def run_task_loop(
     files = Files(project_path, selected_files)
 
     improve_lambda = lambda: improve_fn(
-        ai, prompt, files, memory, preprompts_holder, context_string
+        ai, prompt, files, memory, preprompts_holder, feature_agent_context
     )
 
     print("\n---- begining code generation ----\n")
diff --git a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py
index d5573ba686..bbb3a78bc9 100644
--- a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py
+++ b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py
@@ -1 +1,65 @@
-# todo : write chat agent
+from gpt_engineer.core.ai import AI, HumanMessage, SystemMessage
+
+from gpt_engineer.applications.interactive_cli.feature import Feature
+from gpt_engineer.applications.interactive_cli.repository import Repository
+from gpt_engineer.applications.interactive_cli.files import Files
+from gpt_engineer.applications.interactive_cli.agents.agent_steps import (
+    update_user_file_selection,
+)
+from gpt_engineer.applications.interactive_cli.generation_tools import (
+    build_files_context_string,
+)
+
+
+class ChatAgent:
+
+    def __init__(
+        self,
+        ai: AI,
+        project_path: str,
+        feature: Feature,
+        repository: Repository,
+    ):
+        self.ai = ai
+        self.project_path = project_path
+        self.feature = feature
+        self.repository = repository
+
+    def start(self):
+
+        update_user_file_selection(self.feature.file_selector)
+
+        selected_files = self.feature.file_selector.get_from_yaml().included_files
+
+        files = Files(self.project_path, selected_files)
+
+        context_string = build_files_context_string(
+            self.feature, self.repository.get_git_context(), files
+        )
+
+        system = f"""You are the chat function of an AI software engineering tool called gpt engineer.
+
+The tool takes a feature description, progress on the feature, git context, and repository files relevant to the feature,
+and based on that it suggests new tasks to complete in order to progress the feature, and it implements those tasks for the user.
+
+You are not that tool, you are the chat function of that tool. You are here to help the user discuss their code and their feature and understand any part of it with you - a software engineering expert.
+
+Always provide advice in line with best software engineering practices.
+ +Here is the context for your conversation: + +{context_string}""" + + messages = [ + SystemMessage(content=system), + HumanMessage(content="Hi"), + ] + + while True: + print("\nAI:") + response = self.ai.backoff_inference(messages) + messages.append(response) + + print("\n\nYou:") + user_message = input() + messages.append(HumanMessage(content=user_message)) diff --git a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py index c732da7341..73cd5e0e6a 100644 --- a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py @@ -21,13 +21,15 @@ class FeatureAgent(BaseAgent): def __init__( self, + ai: AI, + project_path: str, feature: Feature, repository: Repository, - ai: AI = None, ): + self.ai = ai + self.project_path = project_path self.feature = feature self.repository = repository - self.ai = ai or AI() def init(self, settings: Settings): @@ -35,7 +37,7 @@ def init(self, settings: Settings): self.ai, self.feature, self.repository, settings.no_branch ) - update_user_file_selection(self.file_selector) + update_user_file_selection(self.feature.file_selector) update_task_description(self.feature) @@ -52,7 +54,7 @@ def resume(self, settings: Settings): self.feature, self.repository, self.ai, - self.file_selector, + self.feature.file_selector, ) def improve(self): diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index c172ea994c..46d5692686 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -31,7 +31,7 @@ def __init__(self, project_path: Union[str, Path], repository: Repository): Path(project_path) / ".feature" / self._files_filename, repository ) - super().__init__(self.feature_path) + super().__init__(self._feature_path) def clear_feature(self) -> None: self.set_description( @@ -43,7 +43,7 @@ def clear_feature(self) -> None: self.set_task( "Please replace with a task description - directing the AI on the first task to implement on this feature" ) - super().__setitem__(self.progress_filename, json.dumps({"done": []})) + super().__setitem__(self._progress_filename, json.dumps({"done": []})) def get_description(self) -> str: """ @@ -54,7 +54,7 @@ def get_description(self) -> str: str The content of the feature file. """ - return super().__getitem__(self.feature_filename) + return super().__getitem__(self._feature_filename) def set_description(self, feature_description: str): """ @@ -65,7 +65,7 @@ def set_description(self, feature_description: str): feature_description : str The new feature_description to write to the feature file. """ - super().__setitem__(self.feature_filename, feature_description) + super().__setitem__(self._feature_filename, feature_description) def get_progress(self) -> dict: """ @@ -76,7 +76,12 @@ def get_progress(self) -> dict: str The content of the feature file. 
""" - return json.loads(super().__getitem__(self.progress_filename)) + + json_string = super().__getitem__(self._progress_filename) + if json_string: + return json.loads(json_string) + + return None def update_progress(self, task: str): """ @@ -89,7 +94,7 @@ def update_progress(self, task: str): """ progress = self.get_progress() new_progress = progress["done"].append(task) - super().__setitem__(self.progress_filename, json.dumps(new_progress, indent=4)) + super().__setitem__(self._progress_filename, json.dumps(new_progress, indent=4)) def set_task(self, task: str): """ @@ -100,7 +105,7 @@ def set_task(self, task: str): task : str The new task to write to the feature file. """ - super().__setitem__(self.task_filename, task) + super().__setitem__(self._task_filename, task) def get_task(self) -> str: """ @@ -111,7 +116,7 @@ def get_task(self) -> str: str The content of the feature file. """ - return super().__getitem__(self.task_filename) + return super().__getitem__(self._task_filename) def complete_task(self): """ @@ -124,7 +129,7 @@ def complete_task(self): self.set_task("") def _file_path(self, filename): - return self.feature_path / filename + return self._feature_path / filename def _open_file_in_editor(self, path): """ @@ -144,10 +149,10 @@ def open_feature_in_editor(self): """ Opens the feature file in the default system editor. """ - self._open_file_in_editor(self._file_path(self.feature_filename)) + self._open_file_in_editor(self._file_path(self._feature_filename)) def open_task_in_editor(self): """ Opens the task file in the default system editor. """ - self._open_file_in_editor(self._file_path(self.task_filename)) + self._open_file_in_editor(self._file_path(self._task_filename)) diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index e46e7ba516..95dd3717c3 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -1,10 +1,7 @@ import xml.etree.ElementTree as ET import json -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.files import Files from gpt_engineer.applications.interactive_cli.domain import FileSelection -from gpt_engineer.applications.interactive_cli.repository import GitContext from gpt_engineer.core.ai import AI from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler @@ -58,12 +55,8 @@ def parse_task_xml_to_class(xml_data): return response -def build_context_string(feature: Feature, git_context: GitContext): - return f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothign more. - -The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. - -## Feature - this is the description fo the current feature we are working on. +def build_context_string(feature, git_context): + return f"""## Feature - this is the description fo the current feature we are working on. {feature.get_description()} ## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. 
@@ -78,7 +71,7 @@ def build_context_string(feature: Feature, git_context: GitContext): """ -def build_files_context_string(feature, git_context, files: Files): +def build_files_context_string(feature, git_context, files): return f"""{build_context_string(feature, git_context)} ## Current Codebase - this is the as is view of the current code base including any unstaged changes. @@ -86,9 +79,7 @@ def build_files_context_string(feature, git_context, files: Files): """ -def generate_suggested_tasks( - ai: AI, feature: Feature, git_context: GitContext, files: Files -) -> str: +def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str: system_prompt = """ You are a software engineer work planning tool. Given a feature description, a list of tasks already completed, and sections of the code repository we are working on, suggest a list of tasks to be done in order to move towards the end goal of completing the feature. diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index bc050c5b0c..90548e2589 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -2,6 +2,7 @@ from dotenv import load_dotenv from gpt_engineer.applications.interactive_cli.agents.feature_agent import FeatureAgent +from gpt_engineer.applications.interactive_cli.agents.chat_agent import ChatAgent from gpt_engineer.applications.interactive_cli.feature import Feature from gpt_engineer.applications.interactive_cli.repository import Repository from gpt_engineer.applications.interactive_cli.domain import Settings @@ -11,7 +12,20 @@ app = typer.Typer() -def common_options( +# @app.command() +# def task( +# new: bool = typer.Option(False, "--new", "-n", help="Initialize a new task."), +# **options, +# ): +# """ +# Handle tasks in the project. +# """ +# # TO BE IMPLEMENTED + + +@app.command() +def feature( + new: bool = typer.Option(False, "--new", "-n", help="Initialize a new feature."), project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), temperature: float = typer.Option( @@ -25,7 +39,7 @@ def common_options( "--azure", "-a", help="""Endpoint for your Azure OpenAI Service (https://xx.openai.azure.com). - In that case, the given model is the deployment name chosen in the Azure AI Studio.""", + In that case, the given model is the deployment name chosen in the Azure AI Studio.""", ), verbose: bool = typer.Option( False, "--verbose", "-v", help="Enable verbose logging for debugging." @@ -33,77 +47,28 @@ def common_options( debug: bool = typer.Option( False, "--debug", "-d", help="Enable debug mode for debugging." ), -): - return { - "project_path": project_path, - "model": model, - "no_branch": no_branch, - "temperature": temperature, - "azure_endpoint": azure_endpoint, - "verbose": verbose, - "debug": debug, - } - - -@app.command() -def task( - new: bool = typer.Option(False, "--new", "-n", help="Initialize a new task."), - **options, -): - """ - Handle tasks in the project. 
- """ - load_dotenv() - options = common_options(**options) - - ai = AI( - model_name=options["model"], - temperature=options["temperature"], - azure_endpoint=options["azure_endpoint"], - ) - - repository = Repository(options["project_path"]) - - feature = Feature(options["project_path"]) - - agent = FeatureAgent(options["project_path"], feature, repository, ai) - - settings = Settings(options["no_branch"]) - - if new: - agent.init(settings) - else: - agent.resume(settings) - - -@app.command() -def feature( - new: bool = typer.Option(False, "--new", "-n", help="Initialize a new feature."), no_branch: bool = typer.Option( False, "--no-branch", "-nb", help="Do not create a new feature branch for this work.", ), - **options, ): """ Handle features in the project. """ load_dotenv() - options = common_options(**options) ai = AI( - model_name=options["model"], - temperature=options["temperature"], - azure_endpoint=options["azure_endpoint"], + model_name=model, + temperature=temperature, ) - repository = Repository(options["project_path"]) + repository = Repository(project_path) - feature = Feature(options["project_path"], repository) + feature = Feature(project_path, repository) - agent = FeatureAgent(feature, repository, ai) + agent = FeatureAgent(ai, project_path, feature, repository) settings = Settings(no_branch) @@ -114,25 +79,30 @@ def feature( @app.command() -def chat(**options): +def chat( + project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), + model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), + temperature: float = typer.Option( + 0.8, + "--temperature", + "-t", + help="Controls randomness: lower values for more focused, deterministic outputs.", + ), +): """ Initiate a chat about the current repository. """ - load_dotenv() - options = common_options(**options) - ai = AI( - model_name=options["model"], - temperature=options["temperature"], - azure_endpoint=options["azure_endpoint"], + model_name=model, + temperature=temperature, ) - repository = Repository(options["project_path"]) + repository = Repository(project_path) - # Add the logic for initiating a chat here - typer.echo( - f"Initiating a chat about the repo at {options['project_path']} using model {options['model']}." 
- ) + feature = Feature(project_path, repository) + chat_agent = ChatAgent(ai, project_path, feature, repository) + + chat_agent.start() if __name__ == "__main__": diff --git a/pyproject.toml b/pyproject.toml index 68faccbfc6..0bdea5c8ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,6 +66,7 @@ sphinx_copybutton = ">=0.5.2" gpt-engineer = 'gpt_engineer.applications.cli.main:app' ge = 'gpt_engineer.applications.cli.main:app' gpte = 'gpt_engineer.applications.cli.main:app' +gpti = 'gpt_engineer.applications.interactive_cli.main:app' bench = 'gpt_engineer.benchmark.__main__:app' gpte_test_application = 'tests.caching_main:app' From 27044969a7dfd9a25ad7002fdb55faf37bbb5044 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 23 May 2024 11:42:31 +0100 Subject: [PATCH 28/36] cool --- .../interactive_cli/agents/agent_steps.py | 24 +++++++++---------- .../interactive_cli/agents/feature_agent.py | 10 ++------ .../applications/interactive_cli/feature.py | 10 ++++++-- pyproject.toml | 2 +- 4 files changed, 22 insertions(+), 24 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py index 29c369fd28..93daf308a1 100644 --- a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py @@ -79,9 +79,8 @@ def check_for_unstaged_changes( return -def confirm_feature_context_and_task_with_user( - feature: Feature, file_selector: FileSelector -): +def confirm_feature_context_and_task_with_user(feature: Feature): + file_selector = feature.file_selector file_selector.update_yaml_from_tracked_files() file_string = file_selector.get_pretty_selected_from_yaml() @@ -118,7 +117,6 @@ def run_task_loop( feature: Feature, repository: Repository, ai: AI, - file_selector: FileSelector, ): memory = DiskMemory(memory_path(project_path)) @@ -134,7 +132,7 @@ def run_task_loop( prompt = Prompt(feature.get_task(), prefix="Task: ") - selected_files = file_selector.get_from_yaml().included_files + selected_files = feature.file_selector.get_from_yaml().included_files files = Files(project_path, selected_files) @@ -149,15 +147,15 @@ def run_task_loop( files.write_to_disk(updated_files_dictionary) - review_changes(project_path, feature, repository, ai, file_selector) + review_changes(project_path, feature, repository, ai) -def run_adjust_loop(feature, file_selector): - implement = confirm_feature_context_and_task_with_user(feature, file_selector) +def run_adjust_loop(feature): + implement = confirm_feature_context_and_task_with_user(feature) while not implement: adjust_feature_task_or_files() - implement = confirm_feature_context_and_task_with_user(feature, file_selector) + implement = confirm_feature_context_and_task_with_user(feature) def run_task(repository, project_path, feature, ai, file_selector): @@ -166,19 +164,19 @@ def run_task(repository, project_path, feature, ai, file_selector): run_task_loop(project_path, feature, repository, ai, file_selector) -def complete_task(repository, project_path, feature, ai, file_selector): +def complete_task(repository, project_path, feature, ai): print("Completing task... 
") repository.stage_all_changes() feature.complete_task() - file_selector.update_yaml_from_tracked_files() + feature.file_selector.update_yaml_from_tracked_files() print("Continuing with next task...") update_task_description(feature) - run_adjust_loop(feature, file_selector) + run_adjust_loop(feature) check_for_unstaged_changes(repository) - run_task_loop(project_path, feature, repository, ai, file_selector) + run_task_loop(project_path, feature, repository, ai) def review_changes( diff --git a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py index 73cd5e0e6a..b75003c0da 100644 --- a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py @@ -45,17 +45,11 @@ def init(self, settings: Settings): def resume(self, settings: Settings): - run_adjust_loop(self.feature, self.file_selector) + run_adjust_loop(self.feature) check_for_unstaged_changes(self.repository) - run_task_loop( - self.project_path, - self.feature, - self.repository, - self.ai, - self.feature.file_selector, - ) + run_task_loop(self.project_path, self.feature, self.repository, self.ai) def improve(self): self.resume() diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index 46d5692686..eb359893b7 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -93,8 +93,14 @@ def update_progress(self, task: str): The new feature_description to write to the feature file. """ progress = self.get_progress() - new_progress = progress["done"].append(task) - super().__setitem__(self._progress_filename, json.dumps(new_progress, indent=4)) + + print(progress["done"]) + + progress["done"].append(task) + + json_string = json.dumps(progress, indent=4) + + super().__setitem__(self._progress_filename, json_string) def set_task(self, task: str): """ diff --git a/pyproject.toml b/pyproject.toml index 0bdea5c8ff..5b0e270462 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ sphinx_copybutton = ">=0.5.2" gpt-engineer = 'gpt_engineer.applications.cli.main:app' ge = 'gpt_engineer.applications.cli.main:app' gpte = 'gpt_engineer.applications.cli.main:app' -gpti = 'gpt_engineer.applications.interactive_cli.main:app' +gptf = 'gpt_engineer.applications.interactive_cli.main:app' bench = 'gpt_engineer.benchmark.__main__:app' gpte_test_application = 'tests.caching_main:app' From 802ec8a60fe46a50ec2db3ee2f2e43869de828d8 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Sun, 26 May 2024 12:39:36 +0100 Subject: [PATCH 29/36] wip --- .gitignore | 1 + .../interactive_cli/agents/agent_steps.py | 40 ++++---- .../interactive_cli/agents/chat_agent.py | 7 +- .../interactive_cli/agents/feature_agent.py | 15 ++- .../interactive_cli/agents/task_agent.py | 95 ++++++++++++++++++- .../applications/interactive_cli/feature.py | 10 +- .../interactive_cli/file_selection.py | 10 +- .../interactive_cli/generation_tools.py | 20 ++-- .../applications/interactive_cli/main.py | 64 ++++++++++--- .../applications/interactive_cli/task.py | 79 +++++++++++++++ 10 files changed, 287 insertions(+), 54 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli/task.py diff --git a/.gitignore b/.gitignore index c9039a7bf4..7f731cfe72 100644 --- a/.gitignore +++ b/.gitignore @@ -97,6 +97,7 @@ prompt .feature +.task gpt_engineer/benchmark/minimal_bench_config.toml 
diff --git a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py index 93daf308a1..9c19d95dea 100644 --- a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py @@ -4,7 +4,7 @@ from gpt_engineer.applications.interactive_cli.files import Files from gpt_engineer.applications.interactive_cli.generation_tools import ( generate_branch_name, - build_context_string, + build_feature_context_string, ) from gpt_engineer.core.ai import AI @@ -13,7 +13,6 @@ from gpt_engineer.core.default.disk_memory import DiskMemory from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path from gpt_engineer.core.preprompts_holder import PrepromptsHolder -from gpt_engineer.core.prompt import Prompt from prompt_toolkit import prompt as cli_input from prompt_toolkit.validation import ValidationError, Validator @@ -21,6 +20,7 @@ from prompt_toolkit.completion import WordCompleter +# This is a random comment to prove the assistant works class FeatureValidator(Validator): def validate(self, document): text = document.text @@ -62,6 +62,7 @@ def update_feature_description(feature: Feature): def update_task_description(feature: Feature): + feature.open_task_in_editor() input("Please edit the task file and then press Enter to continue...") @@ -79,8 +80,9 @@ def check_for_unstaged_changes( return -def confirm_feature_context_and_task_with_user(feature: Feature): - file_selector = feature.file_selector +def confirm_feature_context_and_task_with_user( + feature: Feature, file_selector: FileSelector +): file_selector.update_yaml_from_tracked_files() file_string = file_selector.get_pretty_selected_from_yaml() @@ -108,7 +110,7 @@ def confirm_feature_context_and_task_with_user(feature: Feature): # t - "edit task" using update_task_description step # c - complete the task and start a new one # x - exit -def adjust_feature_task_or_files(): +def adjust_prompt_files(): input("Please edit the prompt files and then press Enter to continue...") @@ -117,12 +119,13 @@ def run_task_loop( feature: Feature, repository: Repository, ai: AI, + file_selector: FileSelector, ): memory = DiskMemory(memory_path(project_path)) preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) - context_string = build_context_string(feature, repository.get_git_context()) + context_string = build_feature_context_string(feature, repository.get_git_context()) feature_agent_context = f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothing more. 
@@ -132,7 +135,7 @@ def run_task_loop( prompt = Prompt(feature.get_task(), prefix="Task: ") - selected_files = feature.file_selector.get_from_yaml().included_files + selected_files = file_selector.get_from_yaml().included_files files = Files(project_path, selected_files) @@ -147,15 +150,15 @@ def run_task_loop( files.write_to_disk(updated_files_dictionary) - review_changes(project_path, feature, repository, ai) + review_changes(project_path, feature, repository, ai, file_selector) -def run_adjust_loop(feature): - implement = confirm_feature_context_and_task_with_user(feature) +def run_adjust_loop(feature, file_selector): + implement = confirm_feature_context_and_task_with_user(feature, file_selector) while not implement: - adjust_feature_task_or_files() - implement = confirm_feature_context_and_task_with_user(feature) + adjust_prompt_files() + implement = confirm_feature_context_and_task_with_user(feature, file_selector) def run_task(repository, project_path, feature, ai, file_selector): @@ -164,19 +167,19 @@ def run_task(repository, project_path, feature, ai, file_selector): run_task_loop(project_path, feature, repository, ai, file_selector) -def complete_task(repository, project_path, feature, ai): +def complete_task(repository, project_path, feature, ai, file_selector): print("Completing task... ") repository.stage_all_changes() feature.complete_task() - feature.file_selector.update_yaml_from_tracked_files() + file_selector.update_yaml_from_tracked_files() print("Continuing with next task...") update_task_description(feature) - run_adjust_loop(feature) + run_adjust_loop(feature, file_selector) check_for_unstaged_changes(repository) - run_task_loop(project_path, feature, repository, ai) + run_task_loop(project_path, feature, repository, ai, file_selector) def review_changes( @@ -184,6 +187,7 @@ def review_changes( feature: Feature, repository: Repository, ai: AI, + file_selector: FileSelector, ): completer = WordCompleter(["r", "c", "u"], ignore_case=True) @@ -201,9 +205,9 @@ def review_changes( ).lower() if result == "r": - run_task(repository, project_path, feature, ai) + run_task(repository, project_path, feature, ai, file_selector) if result == "c": - complete_task(repository, project_path, feature, ai) + complete_task(repository, project_path, feature, ai, file_selector) if result == "x": print("exiting...") return diff --git a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py index bbb3a78bc9..247d5be7ba 100644 --- a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py @@ -3,6 +3,7 @@ from gpt_engineer.applications.interactive_cli.feature import Feature from gpt_engineer.applications.interactive_cli.repository import Repository from gpt_engineer.applications.interactive_cli.files import Files +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( update_user_file_selection, ) @@ -19,17 +20,19 @@ def __init__( project_path: str, feature: Feature, repository: Repository, + file_selector: FileSelector, ): self.ai = ai self.project_path = project_path self.feature = feature self.repository = repository + self.file_selector = file_selector def start(self): - update_user_file_selection(self.feature.file_selector) + update_user_file_selection(self.file_selector) - selected_files = 
self.feature.file_selector.get_from_yaml().included_files + selected_files = self.file_selector.get_from_yaml().included_files files = Files(self.project_path, selected_files) diff --git a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py index b75003c0da..20f5f79856 100644 --- a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py @@ -1,6 +1,7 @@ from gpt_engineer.applications.interactive_cli.feature import Feature from gpt_engineer.applications.interactive_cli.repository import Repository from gpt_engineer.applications.interactive_cli.domain import Settings +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( initialize_new_feature, update_user_file_selection, @@ -25,11 +26,13 @@ def __init__( project_path: str, feature: Feature, repository: Repository, + file_selector: FileSelector, ): self.ai = ai self.project_path = project_path self.feature = feature self.repository = repository + self.file_selector = file_selector def init(self, settings: Settings): @@ -37,7 +40,7 @@ def init(self, settings: Settings): self.ai, self.feature, self.repository, settings.no_branch ) - update_user_file_selection(self.feature.file_selector) + update_user_file_selection(self.file_selector) update_task_description(self.feature) @@ -45,11 +48,17 @@ def init(self, settings: Settings): def resume(self, settings: Settings): - run_adjust_loop(self.feature) + run_adjust_loop(self.feature, self.file_selector) check_for_unstaged_changes(self.repository) - run_task_loop(self.project_path, self.feature, self.repository, self.ai) + run_task_loop( + self.project_path, + self.feature, + self.repository, + self.ai, + self.file_selector, + ) def improve(self): self.resume() diff --git a/gpt_engineer/applications/interactive_cli/agents/task_agent.py b/gpt_engineer/applications/interactive_cli/agents/task_agent.py index 78f7f1499c..7833211fcb 100644 --- a/gpt_engineer/applications/interactive_cli/agents/task_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/task_agent.py @@ -1 +1,94 @@ -# todo : write task agent +from gpt_engineer.applications.interactive_cli.task import Task +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.files import Files +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( + adjust_prompt_files, + check_for_unstaged_changes, + update_user_file_selection, +) + +from gpt_engineer.core.ai import AI +from gpt_engineer.core.prompt import Prompt +from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path +from gpt_engineer.core.preprompts_holder import PrepromptsHolder + +from prompt_toolkit import prompt as cli_input + + +class TaskAgent: + """ + A cli agent which implements a one off task + """ + + def __init__( + self, + ai: AI, + project_path: str, + task: Task, + repository: Repository, + file_selector: FileSelector, + ): + self.ai = ai + self.project_path = project_path + self.task = task + self.repository = repository + self.file_selector = file_selector + + def 
_confirm_task_with_user(self):
+        file_selector = self.file_selector
+        file_selector.update_yaml_from_tracked_files()
+        file_string = file_selector.get_pretty_selected_from_yaml()
+
+        task = self.task.get_task()
+
+        print(f"Files: \n\nrepo\n{file_string}\n\n")
+        print(f"Task: {task}\n\n")
+
+        # do you want to attempt this task?
+        if cli_input("Do you want to implement this task? y/n: ").lower() in [
+            "y",
+            "yes",
+        ]:
+            return True
+
+        return False
+
+    def _run_improve_mode(self):
+        memory = DiskMemory(memory_path(self.project_path))
+        preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH)
+
+        prompt = Prompt(self.task.get_task())
+
+        selected_files = self.file_selector.get_from_yaml().included_files
+
+        files = Files(self.project_path, selected_files)
+
+        improve_lambda = lambda: improve_fn(
+            self.ai, prompt, files, memory, preprompts_holder
+        )
+
+        print("\n---- beginning code generation ----\n")
+        updated_files_dictionary = handle_improve_mode(improve_lambda, memory)
+        print("\n---- ending code generation ----\n")
+
+        files.write_to_disk(updated_files_dictionary)
+
+    def run(self):
+
+        self.task.open_task_in_editor()
+        input("Please edit the task file and then press Enter to continue...")
+
+        update_user_file_selection(self.file_selector)
+
+        implement = self._confirm_task_with_user()
+
+        while not implement:
+            adjust_prompt_files()
+            implement = self._confirm_task_with_user()
+
+        check_for_unstaged_changes(self.repository)
+
+        self._run_improve_mode()
diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py
index eb359893b7..6c311a906f 100644
--- a/gpt_engineer/applications/interactive_cli/feature.py
+++ b/gpt_engineer/applications/interactive_cli/feature.py
@@ -6,6 +6,7 @@
 from typing import Union
 
 from gpt_engineer.core.default.disk_memory import DiskMemory
+from gpt_engineer.core.default.paths import memory_path
 from gpt_engineer.applications.interactive_cli.file_selection import FileSelector
 from gpt_engineer.applications.interactive_cli.repository import Repository
 
@@ -21,15 +22,14 @@ class Feature(DiskMemory):
 
     def __init__(self, project_path: Union[str, Path], repository: Repository):
 
-        self._feature_path = Path(project_path) / ".feature"
+        self._feature_path = Path(memory_path(project_path)) / "feature"
+        self.path = self._feature_path
         self._feature_filename = "feature.md"
         self._progress_filename = "progress.json"
         self._task_filename = "task.md"
-        self._files_filename = "files.yml"
 
-        self.file_selector = FileSelector(
-            Path(project_path) / ".feature" / self._files_filename, repository
-        )
+        if not os.path.exists(self._feature_path):
+            os.makedirs(self._feature_path)
 
         super().__init__(self._feature_path)
 
diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py
index d4157b2b1b..306a70ac3b 100644
--- a/gpt_engineer/applications/interactive_cli/file_selection.py
+++ b/gpt_engineer/applications/interactive_cli/file_selection.py
@@ -2,9 +2,13 @@
 import platform
 import subprocess
 import yaml
+from pathlib import Path
+
 
-from gpt_engineer.applications.interactive_cli.repository import Repository
+from gpt_engineer.core.default.paths import memory_path
 from gpt_engineer.core.ai import AI
+
+from gpt_engineer.applications.interactive_cli.repository import Repository
 from gpt_engineer.applications.interactive_cli.generation_tools import (
     fuzzy_parse_file_selection,
 )
@@ -150,10 +154,10 @@ class FileSelector:
     Manages the active files in a project
directory and creates a YAML file listing them. """ - def __init__(self, yaml_path: str, repository: Repository): + def __init__(self, project_path: str, repository: Repository): self.ai = AI("gpt-4o", temperature=0) self.repository = repository - self.yaml_path = yaml_path + self.yaml_path = Path(memory_path(project_path)) / "files.yml" if os.path.exists(self.yaml_path): return diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index 95dd3717c3..8ef41d4a3b 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -55,24 +55,28 @@ def parse_task_xml_to_class(xml_data): return response -def build_context_string(feature, git_context): +def build_git_context_string(git_context): + return f"""## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. +### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. +{git_context.branch_changes} + +### Staged Changes - this is the diff of the current staged changes. +{git_context.staged_changes}""" + + +def build_feature_context_string(feature, git_context): return f"""## Feature - this is the description fo the current feature we are working on. {feature.get_description()} ## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. {feature.get_progress()["done"]} -## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. -### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. -{git_context.branch_changes} - -### Staged Changes - this is the diff of the current staged changes. -{git_context.staged_changes} +{build_git_context_string(git_context)} """ def build_files_context_string(feature, git_context, files): - return f"""{build_context_string(feature, git_context)} + return f"""{build_feature_context_string(feature, git_context)} ## Current Codebase - this is the as is view of the current code base including any unstaged changes. 
{files.to_chat()} diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 90548e2589..befc0f7b75 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -1,28 +1,23 @@ import typer from dotenv import load_dotenv +from pathlib import Path +from gpt_engineer.core.default.paths import memory_path +from gpt_engineer.applications.interactive_cli.agents.task_agent import TaskAgent from gpt_engineer.applications.interactive_cli.agents.feature_agent import FeatureAgent from gpt_engineer.applications.interactive_cli.agents.chat_agent import ChatAgent from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.task import Task from gpt_engineer.applications.interactive_cli.repository import Repository from gpt_engineer.applications.interactive_cli.domain import Settings +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector + from gpt_engineer.core.ai import AI app = typer.Typer() -# @app.command() -# def task( -# new: bool = typer.Option(False, "--new", "-n", help="Initialize a new task."), -# **options, -# ): -# """ -# Handle tasks in the project. -# """ -# # TO BE IMPLEMENTED - - @app.command() def feature( new: bool = typer.Option(False, "--new", "-n", help="Initialize a new feature."), @@ -68,7 +63,9 @@ def feature( feature = Feature(project_path, repository) - agent = FeatureAgent(ai, project_path, feature, repository) + file_selector = FileSelector(project_path, repository) + + agent = FeatureAgent(ai, project_path, feature, repository, file_selector) settings = Settings(no_branch) @@ -90,7 +87,7 @@ def chat( ), ): """ - Initiate a chat about the current repository. 
+ Initiate a chat about the current repository and feature context """ ai = AI( model_name=model, @@ -100,10 +97,49 @@ def chat( repository = Repository(project_path) feature = Feature(project_path, repository) - chat_agent = ChatAgent(ai, project_path, feature, repository) + + file_selector = FileSelector(project_path, repository) + + chat_agent = ChatAgent(ai, project_path, feature, repository, file_selector) chat_agent.start() if __name__ == "__main__": app() + + +@app.command() +def task( + project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), + model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), + temperature: float = typer.Option( + 0.1, + "--temperature", + "-t", + help="Controls randomness: lower values for more focused, deterministic outputs.", + ), +): + """ + Implement a simple one off task without feature context + """ + load_dotenv() + + ai = AI( + model_name=model, + temperature=temperature, + ) + + repository = Repository(project_path) + + task = Task(project_path, repository) + + file_selector = FileSelector(project_path, repository) + + task_agent = TaskAgent(ai, project_path, task, repository, file_selector) + + task_agent.run() + + # review + + # task.delete() diff --git a/gpt_engineer/applications/interactive_cli/task.py b/gpt_engineer/applications/interactive_cli/task.py new file mode 100644 index 0000000000..7c76526c1f --- /dev/null +++ b/gpt_engineer/applications/interactive_cli/task.py @@ -0,0 +1,79 @@ +import json +import os +import platform +import subprocess +import shutil +from pathlib import Path +from typing import Union + +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.repository import Repository + + +class Task(DiskMemory): + """ + Represents a task that will be done one off without the wider context of a feature + """ + + def __init__(self, project_path: Union[str, Path], repository: Repository): + + self._task_path = Path(project_path) / ".task" + self.path = self._task_path + self._task_filename = "task.md" + self._files_filename = "files.yml" + + if not os.path.exists(self._task_path): + os.makedirs(self._task_path) + + self.set_task("Please replace with task description") + + super().__init__(self._task_path) + + def delete(self): + shutil.rmtree(self._task_path) + + def set_task(self, task: str): + """ + Updates the task file with new text. + + Parameters + ---------- + task : str + The new task to write to the feature file. + """ + super().__setitem__(self._task_filename, task) + + def get_task(self) -> str: + """ + Retrieve the content of the task file in the database. + + Returns + ------- + str + The content of the feature file. + """ + return super().__getitem__(self._task_filename) + + def _file_path(self, filename): + return self._task_path / filename + + def _open_file_in_editor(self, path): + """ + Opens the generated YAML file in the default system editor. + If the YAML file is empty or doesn't exist, generate it first. + """ + + # Platform-specific methods to open the file + if platform.system() == "Windows": + os.startfile(path) + elif platform.system() == "Darwin": + subprocess.run(["open", path]) + else: # Linux and other Unix-like systems + subprocess.run(["xdg-open", path]) + + def open_task_in_editor(self): + """ + Opens the task file in the default system editor. 
+ """ + self._open_file_in_editor(self._file_path(self._task_filename)) From 2142be4834c7cafe20ea273445243718d470e04f Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 6 Jun 2024 14:40:37 +0100 Subject: [PATCH 30/36] roughly working --- .../interactive_cli/agents/agent_steps.py | 57 ++++++++++++++++--- .../interactive_cli/agents/feature_agent.py | 23 +++++++- .../applications/interactive_cli/feature.py | 35 +++++++++--- .../interactive_cli/file_selection.py | 7 +++ .../interactive_cli/generation_tools.py | 33 ++++++++--- .../applications/interactive_cli/main.py | 2 +- .../applications/interactive_cli/task.py | 5 +- pyproject.toml | 1 + 8 files changed, 133 insertions(+), 30 deletions(-) diff --git a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py index 9c19d95dea..5c09f13f43 100644 --- a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py @@ -5,6 +5,7 @@ from gpt_engineer.applications.interactive_cli.generation_tools import ( generate_branch_name, build_feature_context_string, + generate_suggested_tasks, ) from gpt_engineer.core.ai import AI @@ -17,8 +18,11 @@ from prompt_toolkit import prompt as cli_input from prompt_toolkit.validation import ValidationError, Validator from prompt_toolkit import PromptSession as InputSession +from prompt_toolkit.formatted_text import FormattedText from prompt_toolkit.completion import WordCompleter +from yaspin import yaspin + # This is a random comment to prove the assistant works class FeatureValidator(Validator): @@ -62,11 +66,52 @@ def update_feature_description(feature: Feature): def update_task_description(feature: Feature): - feature.open_task_in_editor() input("Please edit the task file and then press Enter to continue...") +def initiate_new_task(ai, feature, git_context, file_selector): + + files = file_selector.get_included_as_file_repository() + + try: + with yaspin(text="Generating suggested tasks...") as spinner: + response = generate_suggested_tasks(ai, feature, git_context, files) + spinner.ok("✔") # Success message + except Exception as e: + raise RuntimeError("Error generating task suggestions.") from e + + tasks = response.tasks + + max_tasks = min(len(tasks), 3) + options = [str(i + 1) for i in range(max_tasks)] + ["c"] + completer = WordCompleter(options, ignore_case=True) + + task_list_message = "\n".join([f"{i + 1}: {tasks[i]}" for i in range(max_tasks)]) + + def get_prompt(): + return [ + ("class:text", response.planning_thoughts), + ( + "class:text", + "\n\nWould you like to work on one of these suggested tasks or choose your own?\n", + ), + ("class:text", task_list_message), + ("class:text", "\nc: Custom task\n"), + ] + + session = InputSession() + result = session.prompt(FormattedText(get_prompt()), completer=completer).lower() + + if result in options[:-1]: + selected_task = tasks[int(result) - 1] + print(f"Selected task: {selected_task}") + feature.set_task(selected_task) + + if result == "c": + update_task_description(feature) + + def check_for_unstaged_changes( repository: Repository, ): @@ -135,9 +180,7 @@ def run_task_loop( prompt = Prompt(feature.get_task(), prefix="Task: ") - selected_files = file_selector.get_from_yaml().included_files - - files = Files(project_path, selected_files) + files = file_selector.get_included_as_file_repository() improve_lambda = lambda: improve_fn( ai, prompt, files, memory, preprompts_holder, feature_agent_context @@ -172,10 
+215,10 @@ def complete_task(repository, project_path, feature, ai, file_selector): repository.stage_all_changes() feature.complete_task() file_selector.update_yaml_from_tracked_files() - print("Continuing with next task...") - update_task_description(feature) + git_context = repository.get_git_context() - run_adjust_loop(feature, file_selector) + print("Continuing with next task...") + initiate_new_task(ai, feature, git_context, file_selector) check_for_unstaged_changes(repository) diff --git a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py index 20f5f79856..bdbbd14b64 100644 --- a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py @@ -8,8 +8,9 @@ check_for_unstaged_changes, run_task_loop, run_adjust_loop, - update_task_description, + initiate_new_task, ) +from prompt_toolkit import prompt as cli_input from gpt_engineer.core.ai import AI from gpt_engineer.core.base_agent import BaseAgent @@ -42,11 +43,29 @@ def init(self, settings: Settings): update_user_file_selection(self.file_selector) - update_task_description(self.feature) + initiate_new_task(self.ai, self.feature, None, self.file_selector) self.resume(settings) def resume(self, settings: Settings): + if self.feature.has_task(): + if cli_input( + "Complete current task and initiate new task? y/n: " + ).lower() in [ + "n", + "no", + ]: + check_for_unstaged_changes(self.repository) + + run_task_loop( + self.project_path, + self.feature, + self.repository, + self.ai, + self.file_selector, + ) + + initiate_new_task(self.ai, self.feature, None, self.file_selector) run_adjust_loop(self.feature, self.file_selector) diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index 6c311a906f..21b2f0d111 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -28,21 +28,21 @@ def __init__(self, project_path: Union[str, Path], repository: Repository): self._progress_filename = "progress.json" self._task_filename = "task.md" + self._feature_placeholder = """Please replace with your own feature description. Markdown is supported. + +Hint: +Improve your prompts by including technical references to any APIs, libraries, components etc that the pre trained model may not know about in detail already.""" + + self._task_placeholder = "Please replace with a task description - directing the AI on the first task to implement on this feature" + if not os.path.exists(self._feature_path): os.makedirs(self._feature_path) super().__init__(self._feature_path) def clear_feature(self) -> None: - self.set_description( - """Please replace with your own feature description. Markdown is supported. 
- -Hint: -Improve your prompts by including technical references to any APIs, libraries, components etc that the pre trained model may not know about in detail already.""" - ) - self.set_task( - "Please replace with a task description - directing the AI on the first task to implement on this feature" - ) + self.set_description(self._feature_placeholder) + self.set_task(self._task_placeholder) super().__setitem__(self._progress_filename, json.dumps({"done": []})) def get_description(self) -> str: @@ -124,6 +124,23 @@ def get_task(self) -> str: """ return super().__getitem__(self._task_filename) + def has_task(self) -> bool: + """ + Retrieve the content of the feature file in the database. + + Returns + ------- + str + The content of the feature file. + """ + + task = self.get_task() + + if task and not task == self._task_placeholder: + return True + + return False + def complete_task(self): """ Moves the current task to the 'done' list in the progress.json file and clears the task file. diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index 306a70ac3b..a1db0e4254 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -9,6 +9,7 @@ from gpt_engineer.core.ai import AI from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.files import Files from gpt_engineer.applications.interactive_cli.generation_tools import ( fuzzy_parse_file_selection, ) @@ -155,6 +156,7 @@ class FileSelector: """ def __init__(self, project_path: str, repository: Repository): + self.project_path = project_path self.ai = AI("gpt-4o", temperature=0) self.repository = repository self.yaml_path = Path(memory_path(project_path)) / "files.yml" @@ -311,3 +313,8 @@ def open_yaml_in_editor(self): subprocess.run(["open", self.yaml_path]) else: # Linux and other Unix-like systems subprocess.run(["xdg-open", self.yaml_path]) + + def get_included_as_file_repository(self): + file_selection = self.get_from_yaml() + + return Files(self.project_path, file_selection.included_files) diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index 8ef41d4a3b..c7835f2719 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -65,15 +65,21 @@ def build_git_context_string(git_context): def build_feature_context_string(feature, git_context): - return f"""## Feature - this is the description fo the current feature we are working on. + feature_string = f"""## Feature - this is the description fo the current feature we are working on. {feature.get_description()} ## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. {feature.get_progress()["done"]} +""" + + if git_context: + return f"""{feature_string} {build_git_context_string(git_context)} """ + return feature_string + def build_files_context_string(feature, git_context, files): return f"""{build_feature_context_string(feature, git_context)} @@ -86,11 +92,15 @@ def build_files_context_string(feature, git_context, files): def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str: system_prompt = """ You are a software engineer work planning tool. 
Given a feature description, a list of tasks already completed, and sections of the code -repository we are working on, suggest a list of tasks to be done in order to move towards the end goal of completing the feature. +repository we are working on, suggest a list of implementation tasks to be done in order to move towards the end goal of completing the feature. -First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what is left to be done. +An implementation task consists of actually writing some code - and doesnt include review or research tasks, or any other activity other tha writing code. -Then output the list of tasks to be done. Please try to keep the tasks small, actionable and independantly commitable. +First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what implementation tasks are left to be done. + +Then output the list of between 0 and 3 implementation tasks to be done which get us closer to our goal. Please try to keep the tasks small, actionable and independantly commitable. + +We only need to move towards our goal with these tasks, we dont have to complete the feature in these 3 steps. The output format will be XML as follows: @@ -99,10 +109,13 @@ def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str: - + - + + + + @@ -112,19 +125,21 @@ def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str: Respond in XML and nothing else. + +You may send as as little as 0 tasks and as many as 3. If you believe the feature is complete, send 0 tasks. """ input = build_files_context_string(feature, git_context, files) - # ai.llm.callbacks.clear() # silent + ai.llm.callbacks.clear() # silent messages = ai.start(system_prompt, input, step_name="suggest-tasks") - # ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) xml = messages[-1].content.strip() - return parse_task_xml_to_class(xml).tasks + return parse_task_xml_to_class(xml) def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index befc0f7b75..5f2a0a6963 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -132,7 +132,7 @@ def task( repository = Repository(project_path) - task = Task(project_path, repository) + task = Task(project_path) file_selector = FileSelector(project_path, repository) diff --git a/gpt_engineer/applications/interactive_cli/task.py b/gpt_engineer/applications/interactive_cli/task.py index 7c76526c1f..c6b9f0911d 100644 --- a/gpt_engineer/applications/interactive_cli/task.py +++ b/gpt_engineer/applications/interactive_cli/task.py @@ -7,6 +7,7 @@ from typing import Union from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import memory_path from gpt_engineer.applications.interactive_cli.file_selection import FileSelector from gpt_engineer.applications.interactive_cli.repository import Repository @@ -16,9 +17,9 @@ class Task(DiskMemory): Represents a task that will be done one off without the wider context of a feature """ - def __init__(self, project_path: Union[str, Path], repository: Repository): + def __init__(self, project_path: Union[str, Path]): - self._task_path = Path(project_path) / ".task" + self._task_path 
= Path(memory_path(project_path)) / "task" self.path = self._task_path self._task_filename = "task.md" self._files_filename = "files.yml" diff --git a/pyproject.toml b/pyproject.toml index 5b0e270462..8512635977 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ regex = "^2023.12.25" pillow = "^10.2.0" datasets = "^2.17.1" black = "23.3.0" +gitpython = "^3.0.0" [tool.poetry.group.dev.dependencies] pytest = ">=7.3.1" From 7ffc87e2b08bb30255d50ee03285b7664fa02ead Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Thu, 6 Jun 2024 16:07:00 +0100 Subject: [PATCH 31/36] WIP --- .../interactive_cli/agents/agent_steps.py | 10 +- .../interactive_cli/agents/chat_agent.py | 12 +- .../interactive_cli/agents/feature_agent.py | 10 +- .../interactive_cli/agents/task_agent.py | 10 +- .../applications/interactive_cli/feature.py | 23 +- .../interactive_cli/file_selection.py | 8 +- .../interactive_cli/generation_tools.py | 2 +- .../applications/interactive_cli/main.py | 18 +- .../applications/interactive_cli/task.py | 4 +- .../interactive_cli_loop/__init__.py | 0 .../interactive_cli_loop/agents/__init__.py | 0 .../agents/agent_steps.py | 256 ++++++++++ .../interactive_cli_loop/agents/chat_agent.py | 68 +++ .../agents/feature_agent.py | 83 +++ .../interactive_cli_loop/agents/task_agent.py | 94 ++++ .../interactive_cli_loop/domain.py | 13 + .../interactive_cli_loop/example_project | 1 + .../interactive_cli_loop/feature.py | 181 +++++++ .../interactive_cli_loop/file_selection.py | 320 ++++++++++++ .../interactive_cli_loop/files.py | 40 ++ .../interactive_cli_loop/generation_tools.py | 483 ++++++++++++++++++ .../applications/interactive_cli_loop/main.py | 147 ++++++ .../interactive_cli_loop/prompts/__init__.py | 0 .../prompts/fuzzy_file_parser | 320 ++++++++++++ .../interactive_cli_loop/repository.py | 151 ++++++ .../applications/interactive_cli_loop/task.py | 80 +++ .../interactive_cli/test_file_selection.py | 4 +- 27 files changed, 2292 insertions(+), 46 deletions(-) create mode 100644 gpt_engineer/applications/interactive_cli_loop/__init__.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/__init__.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/domain.py create mode 160000 gpt_engineer/applications/interactive_cli_loop/example_project create mode 100644 gpt_engineer/applications/interactive_cli_loop/feature.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/file_selection.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/files.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/generation_tools.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/main.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/prompts/__init__.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser create mode 100644 gpt_engineer/applications/interactive_cli_loop/repository.py create mode 100644 gpt_engineer/applications/interactive_cli_loop/task.py diff --git a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py index 
5c09f13f43..3d9d2d25b7 100644 --- a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py @@ -1,8 +1,8 @@ -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.files import Files -from gpt_engineer.applications.interactive_cli.generation_tools import ( +from gpt_engineer.applications.interactive_cli_loop.feature import Feature +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.files import Files +from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( generate_branch_name, build_feature_context_string, generate_suggested_tasks, diff --git a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py index 247d5be7ba..9c92f00530 100644 --- a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py @@ -1,13 +1,13 @@ from gpt_engineer.core.ai import AI, HumanMessage, SystemMessage -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.files import Files -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( +from gpt_engineer.applications.interactive_cli_loop.feature import Feature +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.files import Files +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( update_user_file_selection, ) -from gpt_engineer.applications.interactive_cli.generation_tools import ( +from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( build_files_context_string, ) diff --git a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py index bdbbd14b64..36871d1b23 100644 --- a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py @@ -1,8 +1,8 @@ -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.domain import Settings -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( +from gpt_engineer.applications.interactive_cli_loop.feature import Feature +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.domain import Settings +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( 
    initialize_new_feature,
    update_user_file_selection,
    check_for_unstaged_changes,
diff --git a/gpt_engineer/applications/interactive_cli/agents/task_agent.py b/gpt_engineer/applications/interactive_cli/agents/task_agent.py
index 7833211fcb..33fa6a7ce5 100644
--- a/gpt_engineer/applications/interactive_cli/agents/task_agent.py
+++ b/gpt_engineer/applications/interactive_cli/agents/task_agent.py
@@ -1,8 +1,8 @@
-from gpt_engineer.applications.interactive_cli.task import Task
-from gpt_engineer.applications.interactive_cli.repository import Repository
-from gpt_engineer.applications.interactive_cli.files import Files
-from gpt_engineer.applications.interactive_cli.file_selection import FileSelector
-from gpt_engineer.applications.interactive_cli.agents.agent_steps import (
+from gpt_engineer.applications.interactive_cli_loop.task import Task
+from gpt_engineer.applications.interactive_cli_loop.repository import Repository
+from gpt_engineer.applications.interactive_cli_loop.files import Files
+from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector
+from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import (
     adjust_prompt_files,
     check_for_unstaged_changes,
     update_user_file_selection,
diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py
index 21b2f0d111..741e5d2214 100644
--- a/gpt_engineer/applications/interactive_cli/feature.py
+++ b/gpt_engineer/applications/interactive_cli/feature.py
@@ -7,8 +7,8 @@
 from gpt_engineer.core.default.disk_memory import DiskMemory
 from gpt_engineer.core.default.paths import memory_path
-from gpt_engineer.applications.interactive_cli.file_selection import FileSelector
-from gpt_engineer.applications.interactive_cli.repository import Repository
+from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector
+from gpt_engineer.applications.interactive_cli_loop.repository import Repository
 
 
 class Feature(DiskMemory):
@@ -67,6 +67,18 @@ def set_description(self, feature_description: str):
         """
         super().__setitem__(self._feature_filename, feature_description)
 
+    def has_description(self) -> bool:
+        """
+        Does the feature have a description?
+        """
+
+        description = self.get_description()
+
+        if description and not description == self._feature_placeholder:
+            return True
+
+        return False
+
     def get_progress(self) -> dict:
         """
         Retrieve the progress object.
@@ -126,12 +138,7 @@ def get_task(self) -> str:
 
     def has_task(self) -> bool:
         """
-        Retrieve the content of the feature file in the database.
-
-        Returns
-        -------
-        str
-            The content of the feature file.
+        Does the feature have an active task?
""" task = self.get_task() diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/interactive_cli/file_selection.py index a1db0e4254..6ccfeed17c 100644 --- a/gpt_engineer/applications/interactive_cli/file_selection.py +++ b/gpt_engineer/applications/interactive_cli/file_selection.py @@ -8,12 +8,12 @@ from gpt_engineer.core.default.paths import memory_path from gpt_engineer.core.ai import AI -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.files import Files -from gpt_engineer.applications.interactive_cli.generation_tools import ( +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.files import Files +from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( fuzzy_parse_file_selection, ) -from gpt_engineer.applications.interactive_cli.domain import FileSelection +from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection def paths_to_tree(paths): diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index c7835f2719..af6c959bef 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -1,7 +1,7 @@ import xml.etree.ElementTree as ET import json -from gpt_engineer.applications.interactive_cli.domain import FileSelection +from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection from gpt_engineer.core.ai import AI from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 5f2a0a6963..0a92d843a2 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -3,14 +3,16 @@ from pathlib import Path from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.applications.interactive_cli.agents.task_agent import TaskAgent -from gpt_engineer.applications.interactive_cli.agents.feature_agent import FeatureAgent -from gpt_engineer.applications.interactive_cli.agents.chat_agent import ChatAgent -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.task import Task -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.domain import Settings -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.agents.task_agent import TaskAgent +from gpt_engineer.applications.interactive_cli_loop.agents.feature_agent import ( + FeatureAgent, +) +from gpt_engineer.applications.interactive_cli_loop.agents.chat_agent import ChatAgent +from gpt_engineer.applications.interactive_cli_loop.feature import Feature +from gpt_engineer.applications.interactive_cli_loop.task import Task +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.domain import Settings +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector from gpt_engineer.core.ai import AI diff --git a/gpt_engineer/applications/interactive_cli/task.py 
b/gpt_engineer/applications/interactive_cli/task.py index c6b9f0911d..a4f46aa89c 100644 --- a/gpt_engineer/applications/interactive_cli/task.py +++ b/gpt_engineer/applications/interactive_cli/task.py @@ -8,8 +8,8 @@ from gpt_engineer.core.default.disk_memory import DiskMemory from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.repository import Repository class Task(DiskMemory): diff --git a/gpt_engineer/applications/interactive_cli_loop/__init__.py b/gpt_engineer/applications/interactive_cli_loop/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/__init__.py b/gpt_engineer/applications/interactive_cli_loop/agents/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py new file mode 100644 index 0000000000..3d9d2d25b7 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py @@ -0,0 +1,256 @@ +from gpt_engineer.applications.interactive_cli_loop.feature import Feature +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.files import Files +from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( + generate_branch_name, + build_feature_context_string, + generate_suggested_tasks, +) + +from gpt_engineer.core.ai import AI +from gpt_engineer.core.prompt import Prompt +from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path +from gpt_engineer.core.preprompts_holder import PrepromptsHolder + +from prompt_toolkit import prompt as cli_input +from prompt_toolkit.validation import ValidationError, Validator +from prompt_toolkit import PromptSession as InputSession +from prompt_toolkit.formatted_text import FormattedText +from prompt_toolkit.completion import WordCompleter + +from yaspin import yaspin + + +# This is a random comment to prove the assistant works +class FeatureValidator(Validator): + def validate(self, document): + text = document.text + if not text: + raise ValidationError( + message="Feature description cannot be empty", cursor_position=len(text) + ) + + +def initialize_new_feature( + ai: AI, feature: Feature, repository: Repository, no_branch: bool +): + feature.clear_feature() + + update_feature_description(feature) + + if not no_branch: + print("Creating feature branch... 
(this can be disabled with -nb setting)") + + branch_name = generate_branch_name(ai, feature.get_description()) + + branch_name = cli_input("\nConfirm branch name: ", default=branch_name) + + repository.create_branch(branch_name) + print("\nFeature branch created.\n") + + +def update_user_file_selection(file_selector: FileSelector): + file_selector.update_yaml_from_tracked_files() + file_selector.open_yaml_in_editor() + input( + "Please edit the file selection for this feature and then press Enter to continue..." + ) + + +def update_feature_description(feature: Feature): + feature.open_feature_in_editor() + input("Please edit the feature file and then press Enter to continue...") + + +def update_task_description(feature: Feature): + feature.open_task_in_editor() + input("Please edit the task file and then press Enter to continue...") + + +def initiate_new_task(ai, feature, git_context, file_selector): + + files = file_selector.get_included_as_file_repository() + + try: + with yaspin(text="Generating suggested tasks...") as spinner: + response = generate_suggested_tasks(ai, feature, git_context, files) + spinner.ok("✔") # Success message + except Exception as e: + raise RuntimeError("Error generating task suggestions.") from e + + tasks = response.tasks + + max_tasks = min(len(tasks), 3) + options = [str(i + 1) for i in range(max_tasks)] + ["c"] + completer = WordCompleter(options, ignore_case=True) + + task_list_message = "\n".join([f"{i + 1}: {tasks[i]}" for i in range(max_tasks)]) + + def get_prompt(): + return [ + ("class:text", response.planning_thoughts), + ( + "class:text", + "\n\nWould you like to work on one of these suggested tasks or choose your own?\n", + ), + ("class:text", task_list_message), + ("class:text", "\nc: Custom task\n"), + ] + + session = InputSession() + result = session.prompt(FormattedText(get_prompt()), completer=completer).lower() + + if result in options[:-1]: + selected_task = tasks[int(result) - 1] + print(f"Selected task: {selected_task}") + feature.set_task(selected_task) + + if result == "c": + update_task_description(feature) + + +def check_for_unstaged_changes( + repository: Repository, +): + unstaged_changes = repository.get_unstaged_changes() + + if unstaged_changes: + if input( + "Unstaged changes present are you sure you want to proceed? y/n: " + ).lower() not in ["", "y", "yes"]: + print("Ok, not proceeding.") + return + + +def confirm_feature_context_and_task_with_user( + feature: Feature, file_selector: FileSelector +): + file_selector.update_yaml_from_tracked_files() + file_string = file_selector.get_pretty_selected_from_yaml() + + feature_description = feature.get_description() + task = feature.get_task() + + # list feature, files and task + print(f"Feature: {feature_description}\n\n") + print(f"Files: \n\nrepo\n{file_string}\n\n") + print(f"Task: {task}\n\n") + + # do you want to attempt this task? + if cli_input("Do you want to implement this task? 
y/n: ").lower() in [ + "y", + "yes", + ]: + return True + + return False + + +# todo : create a function which uses the test4.py example code approach to offer a selection of options to the user +# f - "edit feature" using update_feature_description step +# s - "edit file selection" using update_user_file_selection step +# t - "edit task" using update_task_description step +# c - complete the task and start a new one +# x - exit +def adjust_prompt_files(): + input("Please edit the prompt files and then press Enter to continue...") + + +def run_task_loop( + project_path, + feature: Feature, + repository: Repository, + ai: AI, + file_selector: FileSelector, +): + + memory = DiskMemory(memory_path(project_path)) + preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) + + context_string = build_feature_context_string(feature, repository.get_git_context()) + + feature_agent_context = f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothing more. + +The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. + +{context_string}""" + + prompt = Prompt(feature.get_task(), prefix="Task: ") + + files = file_selector.get_included_as_file_repository() + + improve_lambda = lambda: improve_fn( + ai, prompt, files, memory, preprompts_holder, feature_agent_context + ) + + print("\n---- begining code generation ----\n") + # Creates loop + updated_files_dictionary = handle_improve_mode(improve_lambda, memory) + print("\n---- ending code generation ----\n") + + files.write_to_disk(updated_files_dictionary) + + review_changes(project_path, feature, repository, ai, file_selector) + + +def run_adjust_loop(feature, file_selector): + implement = confirm_feature_context_and_task_with_user(feature, file_selector) + + while not implement: + adjust_prompt_files() + implement = confirm_feature_context_and_task_with_user(feature, file_selector) + + +def run_task(repository, project_path, feature, ai, file_selector): + print("Rerunning generation...") + check_for_unstaged_changes(repository) + run_task_loop(project_path, feature, repository, ai, file_selector) + + +def complete_task(repository, project_path, feature, ai, file_selector): + print("Completing task... ") + repository.stage_all_changes() + feature.complete_task() + file_selector.update_yaml_from_tracked_files() + git_context = repository.get_git_context() + + print("Continuing with next task...") + initiate_new_task(ai, feature, git_context, file_selector) + + check_for_unstaged_changes(repository) + + run_task_loop(project_path, feature, repository, ai, file_selector) + + +def review_changes( + project_path, + feature: Feature, + repository: Repository, + ai: AI, + file_selector: FileSelector, +): + + completer = WordCompleter(["r", "c", "u"], ignore_case=True) + session = InputSession() + + # Using prompt to get user input + result = session.prompt( + """Please review the unstaged changes generated by GPT Engineer.. 
+
+r: Retry the task (incorporating changes to prompt files)
+c: Complete task and stage changes
+x: Exit
+""",
+        completer=completer,
+    ).lower()
+
+    if result == "r":
+        run_task(repository, project_path, feature, ai, file_selector)
+    if result == "c":
+        complete_task(repository, project_path, feature, ai, file_selector)
+    if result == "x":
+        print("exiting...")
+        return
diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py b/gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py
new file mode 100644
index 0000000000..9c92f00530
--- /dev/null
+++ b/gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py
@@ -0,0 +1,68 @@
+from gpt_engineer.core.ai import AI, HumanMessage, SystemMessage
+
+from gpt_engineer.applications.interactive_cli_loop.feature import Feature
+from gpt_engineer.applications.interactive_cli_loop.repository import Repository
+from gpt_engineer.applications.interactive_cli_loop.files import Files
+from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector
+from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import (
+    update_user_file_selection,
+)
+from gpt_engineer.applications.interactive_cli_loop.generation_tools import (
+    build_files_context_string,
+)
+
+
+class ChatAgent:
+
+    def __init__(
+        self,
+        ai: AI,
+        project_path: str,
+        feature: Feature,
+        repository: Repository,
+        file_selector: FileSelector,
+    ):
+        self.ai = ai
+        self.project_path = project_path
+        self.feature = feature
+        self.repository = repository
+        self.file_selector = file_selector
+
+    def start(self):
+
+        update_user_file_selection(self.file_selector)
+
+        selected_files = self.file_selector.get_from_yaml().included_files
+
+        files = Files(self.project_path, selected_files)
+
+        context_string = build_files_context_string(
+            self.feature, self.repository.get_git_context(), files
+        )
+
+        system = f"""You are the chat function of an AI software engineering tool called gpt engineer.
+
+The tool takes a feature description, progress on the feature, git context, and repository files relevant to the feature
+and based on that it suggests new tasks to complete in order to progress the feature, and it implements those tasks for the user.
+
+You are not that tool, you are the chat function of that tool. You are here to help the user discuss their code and their feature and understand any part of it with you - a software engineering expert.
+
+Always provide advice as to best software engineering practices.
+ +Here is the context for your conversation: + +{context_string}""" + + messages = [ + SystemMessage(content=system), + HumanMessage(content="Hi"), + ] + + while True: + print("\nAI:") + response = self.ai.backoff_inference(messages) + messages.append(response) + + print("\n\nYou:") + user_message = input() + messages.append(HumanMessage(content=user_message)) diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py new file mode 100644 index 0000000000..36871d1b23 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py @@ -0,0 +1,83 @@ +from gpt_engineer.applications.interactive_cli_loop.feature import Feature +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.domain import Settings +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( + initialize_new_feature, + update_user_file_selection, + check_for_unstaged_changes, + run_task_loop, + run_adjust_loop, + initiate_new_task, +) +from prompt_toolkit import prompt as cli_input + +from gpt_engineer.core.ai import AI +from gpt_engineer.core.base_agent import BaseAgent + + +class FeatureAgent(BaseAgent): + """ + A cli agent which implements a feature as a set of incremental tasks + """ + + def __init__( + self, + ai: AI, + project_path: str, + feature: Feature, + repository: Repository, + file_selector: FileSelector, + ): + self.ai = ai + self.project_path = project_path + self.feature = feature + self.repository = repository + self.file_selector = file_selector + + def init(self, settings: Settings): + + initialize_new_feature( + self.ai, self.feature, self.repository, settings.no_branch + ) + + update_user_file_selection(self.file_selector) + + initiate_new_task(self.ai, self.feature, None, self.file_selector) + + self.resume(settings) + + def resume(self, settings: Settings): + if self.feature.has_task(): + if cli_input( + "Complete current task and initiate new task? 
y/n: " + ).lower() in [ + "n", + "no", + ]: + check_for_unstaged_changes(self.repository) + + run_task_loop( + self.project_path, + self.feature, + self.repository, + self.ai, + self.file_selector, + ) + + initiate_new_task(self.ai, self.feature, None, self.file_selector) + + run_adjust_loop(self.feature, self.file_selector) + + check_for_unstaged_changes(self.repository) + + run_task_loop( + self.project_path, + self.feature, + self.repository, + self.ai, + self.file_selector, + ) + + def improve(self): + self.resume() diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py b/gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py new file mode 100644 index 0000000000..33fa6a7ce5 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py @@ -0,0 +1,94 @@ +from gpt_engineer.applications.interactive_cli_loop.task import Task +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.files import Files +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( + adjust_prompt_files, + check_for_unstaged_changes, + update_user_file_selection, +) + +from gpt_engineer.core.ai import AI +from gpt_engineer.core.prompt import Prompt +from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path +from gpt_engineer.core.preprompts_holder import PrepromptsHolder + +from prompt_toolkit import prompt as cli_input + + +class TaskAgent: + """ + A cli agent which implements a one off task + """ + + def __init__( + self, + ai: AI, + project_path: str, + task: Task, + repository: Repository, + file_selector: FileSelector, + ): + self.ai = ai + self.project_path = project_path + self.task = task + self.repository = repository + self.file_selector = file_selector + + def _confirm__task_with_user(self): + file_selector = self.file_selector + file_selector.update_yaml_from_tracked_files() + file_string = file_selector.get_pretty_selected_from_yaml() + + task = self.task.get_task() + + print(f"Files: \n\nrepo\n{file_string}\n\n") + print(f"Task: {task}\n\n") + + # do you want to attempt this task? + if cli_input("Do you want to implement this task? 
y/n: ").lower() in [ + "y", + "yes", + ]: + return True + + return False + + def _run_improve_mode(self): + memory = DiskMemory(memory_path(self.project_path)) + preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) + + prompt = Prompt(self.task.get_task()) + + selected_files = self.file_selector.get_from_yaml().included_files + + files = Files(self.project_path, selected_files) + + improve_lambda = lambda: improve_fn( + self.ai, prompt, files, memory, preprompts_holder + ) + + print("\n---- begining code generation ----\n") + updated_files_dictionary = handle_improve_mode(improve_lambda, memory) + print("\n---- ending code generation ----\n") + + files.write_to_disk(updated_files_dictionary) + + def run(self): + + self.task.open_task_in_editor() + input("Please edit the task file and then press Enter to continue...") + + update_user_file_selection(self.file_selector) + + implement = self._confirm__task_with_user() + + while not implement: + adjust_prompt_files() + implement = self._confirm__task_with_user() + + check_for_unstaged_changes(self.repository) + + self._run_improve_mode() diff --git a/gpt_engineer/applications/interactive_cli_loop/domain.py b/gpt_engineer/applications/interactive_cli_loop/domain.py new file mode 100644 index 0000000000..2493c40772 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/domain.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass +from typing import List + + +@dataclass +class FileSelection: + included_files: List[str] + excluded_files: List[str] + + +class Settings: + def __init__(self, no_branch: bool = False): + self.no_branch = no_branch diff --git a/gpt_engineer/applications/interactive_cli_loop/example_project b/gpt_engineer/applications/interactive_cli_loop/example_project new file mode 160000 index 0000000000..b22fbe6c76 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/example_project @@ -0,0 +1 @@ +Subproject commit b22fbe6c760ac196edacdfb508ad300d033e12d9 diff --git a/gpt_engineer/applications/interactive_cli_loop/feature.py b/gpt_engineer/applications/interactive_cli_loop/feature.py new file mode 100644 index 0000000000..909e5a36c3 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/feature.py @@ -0,0 +1,181 @@ +import json +import os +import platform +import subprocess +from pathlib import Path +from typing import Union + +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import memory_path +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.repository import Repository + + +class Feature(DiskMemory): + """ + Represents a ticket which will be developed incrementally, + + Includes with a feature (overal description of the change), + a task (current incremental work item), + and progress (history of incremental work completed) + """ + + def __init__(self, project_path: Union[str, Path], repository: Repository): + + self._feature_path = Path(memory_path(project_path)) / "feature" + self.path = self._feature_path + self._feature_filename = "feature.md" + self._progress_filename = "progress.json" + self._task_filename = "task.md" + + self._feature_placeholder = """Please replace with your own feature description. Markdown is supported. 
+ +Hint: +Improve your prompts by including technical references to any APIs, libraries, components etc that the pre trained model may not know about in detail already.""" + + self._task_placeholder = "Please replace with a task description - directing the AI on the first task to implement on this feature" + + if not os.path.exists(self._feature_path): + os.makedirs(self._feature_path) + + super().__init__(self._feature_path) + + def clear_feature(self) -> None: + self.set_description(self._feature_placeholder) + self.set_task(self._task_placeholder) + super().__setitem__(self._progress_filename, json.dumps({"done": []})) + + def get_description(self) -> str: + """ + Retrieve the content of the feature file in the database. + + Returns + ------- + str + The content of the feature file. + """ + return super().__getitem__(self._feature_filename) + + def set_description(self, feature_description: str): + """ + Updates the feature file with new text. + + Parameters + ---------- + feature_description : str + The new feature_description to write to the feature file. + """ + super().__setitem__(self._feature_filename, feature_description) + + def get_progress(self) -> dict: + """ + Retrieve the progress object. + + Returns + ------- + str + The content of the feature file. + """ + + json_string = super().__getitem__(self._progress_filename) + if json_string: + return json.loads(json_string) + + return None + + def update_progress(self, task: str): + """ + Updates the progress with a new completed task. + + Parameters + ---------- + feature_description : str + The new feature_description to write to the feature file. + """ + progress = self.get_progress() + + print(progress["done"]) + + progress["done"].append(task) + + json_string = json.dumps(progress, indent=4) + + super().__setitem__(self._progress_filename, json_string) + + def set_task(self, task: str): + """ + Updates the task file with new text. + + Parameters + ---------- + task : str + The new task to write to the feature file. + """ + super().__setitem__(self._task_filename, task) + + def get_task(self) -> str: + """ + Retrieve the content of the feature file in the database. + + Returns + ------- + str + The content of the feature file. + """ + return super().__getitem__(self._task_filename) + + def has_task(self) -> bool: + """ + Retrieve the content of the feature file in the database. + + Returns + ------- + str + The content of the feature file. + """ + + task = self.get_task() + + if task and not task == self._task_placeholder: + return True + + return False + + def complete_task(self): + """ + Moves the current task to the 'done' list in the progress.json file and clears the task file. + """ + task = self.get_task() + + if task: + self.update_progress(task) + self.set_task("") + + def _file_path(self, filename): + return self._feature_path / filename + + def _open_file_in_editor(self, path): + """ + Opens the generated YAML file in the default system editor. + If the YAML file is empty or doesn't exist, generate it first. + """ + + # Platform-specific methods to open the file + if platform.system() == "Windows": + os.startfile(path) + elif platform.system() == "Darwin": + subprocess.run(["open", path]) + else: # Linux and other Unix-like systems + subprocess.run(["xdg-open", path]) + + def open_feature_in_editor(self): + """ + Opens the feature file in the default system editor. 
+ """ + self._open_file_in_editor(self._file_path(self._feature_filename)) + + def open_task_in_editor(self): + """ + Opens the task file in the default system editor. + """ + self._open_file_in_editor(self._file_path(self._task_filename)) diff --git a/gpt_engineer/applications/interactive_cli_loop/file_selection.py b/gpt_engineer/applications/interactive_cli_loop/file_selection.py new file mode 100644 index 0000000000..6ccfeed17c --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/file_selection.py @@ -0,0 +1,320 @@ +import os +import platform +import subprocess +import yaml +from pathlib import Path + + +from gpt_engineer.core.default.paths import memory_path +from gpt_engineer.core.ai import AI + +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.files import Files +from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( + fuzzy_parse_file_selection, +) +from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection + + +def paths_to_tree(paths): + tree = {} + files_marker = "(./)" + + for path in paths: + parts = path.split("/") + current_level = tree + + for part in parts[:-1]: + if part not in current_level: + current_level[part] = {} + current_level = current_level[part] + + if isinstance(current_level, dict): + if files_marker not in current_level: + current_level[files_marker] = [] + current_level[files_marker].append(parts[-1]) + + # Clean and sort the tree to match the required format + def clean_tree(node): + if not isinstance(node, dict): + return node + sorted_keys = sorted(node.keys(), key=lambda x: (x == files_marker, x)) + cleaned_node = {key: clean_tree(node[key]) for key in sorted_keys} + if sorted_keys == [files_marker]: + return cleaned_node[files_marker] + return cleaned_node + + cleaned_tree = clean_tree(tree) + return cleaned_tree + + +def tree_to_paths(tree): + + files_marker = "(./)" + + def traverse_tree(tree, base_path=""): + paths = [] + if tree: + for key, value in tree.items(): + if key == files_marker: + if value: + for file in value: + paths.append(os.path.join(base_path, file)) + elif isinstance(value, list): + for file in value: + paths.append(os.path.join(base_path, key, file)) + else: + subfolder_path = os.path.join(base_path, key) + paths.extend(traverse_tree(value, subfolder_path)) + return paths + + return traverse_tree(tree) + + +def commented_yaml_to_file_selection(commented_content) -> FileSelection: + commented_content_lines = commented_content.split("\n") + uncommented_content_1 = "\n".join( + line.replace("# ", "").replace("#", "") for line in commented_content_lines + ) + uncommented_content_2 = "\n".join( + line.replace("#", "") for line in commented_content_lines + ) + + included_files = tree_to_paths(yaml.safe_load(commented_content)) + try: + all_files = tree_to_paths(yaml.safe_load(uncommented_content_1)) + except: + try: + all_files = tree_to_paths(yaml.safe_load(uncommented_content_2)) + except: + raise ValueError( + "Could not convert the commented yaml to a file selection. Please check the format." 
+ ) + + included_files_not_in_all_files = set(included_files) - set(all_files) + + if len(included_files_not_in_all_files) > 0: + raise ValueError("Yaml file selection has not been read correctly.") + + excluded_files = list(set(all_files) - set(included_files)) + return FileSelection(included_files, excluded_files) + + +def file_selection_to_commented_yaml(selection: FileSelection) -> str: + # Dont worry about commenting lines if they are no excluded files + if not selection.excluded_files: + tree = paths_to_tree(selection.included_files) + + return yaml.dump(tree, sort_keys=False) + + all_files = list(selection.included_files) + list(selection.excluded_files) + + current_tree = paths_to_tree(all_files) + + # Add a # in front of files which are excluded. This is a marker for us to go back and properly comment them out + def mark_excluded_files(structure, prefix=""): + if isinstance(structure, dict): + for key, value in structure.items(): + if key == "(./)": + structure[key] = mark_excluded_files(value, prefix) + else: + new_prefix = os.path.join(prefix, key) + structure[key] = mark_excluded_files(value, new_prefix) + elif isinstance(structure, list): + for i, item in enumerate(structure): + full_path = os.path.join(prefix, item) + + if full_path in selection.excluded_files: + structure[i] = f"#{item}" + + return structure + + mark_excluded_files(current_tree) + + content = yaml.dump(current_tree, sort_keys=False) + + # Find all files marked for commenting - add comment and remove the mark. + def comment_marked_files(yaml_content): + lines = yaml_content.split("\n") + + updated_lines = [] + for line in lines: + if "#" in line: + line = line.replace("- '#", "#- ").replace("'", "") + updated_lines.append(line) + + return "\n".join(updated_lines) + + commented_yaml = comment_marked_files(content) + + return commented_yaml + + +class FileSelector: + """ + Manages the active files in a project directory and creates a YAML file listing them. 
+ """ + + def __init__(self, project_path: str, repository: Repository): + self.project_path = project_path + self.ai = AI("gpt-4o", temperature=0) + self.repository = repository + self.yaml_path = Path(memory_path(project_path)) / "files.yml" + + if os.path.exists(self.yaml_path): + return + + print("YAML file is missing or empty, generating YAML...") + + file_selection = FileSelection([], self.repository.get_tracked_files()) + + self.set_to_yaml(file_selection) + + def _write_yaml_with_header(self, yaml_content): + + def add_indentation(content): + lines = content.split("\n") + new_lines = [] + last_key = None + + for line in lines: + stripped_line = line.replace("#", "").strip() + if stripped_line.endswith(":"): + last_key = stripped_line + if stripped_line.startswith("- ") and (last_key != "(./):"): + # add 2 spaces at the begining of line or after any # + + new_lines.append(" " + line) # Add extra indentation + else: + new_lines.append(line) + return "\n".join(new_lines) + + indented_content = add_indentation(yaml_content) + with open(self.yaml_path, "w") as file: + file.write( + f"""# Uncomment any files you would like to use for this feature +# Note that (./) is a special key which represents files at the root of the parent directory + +{indented_content}""" + ) + + def _read_yaml_with_headers(self): + with open(self.yaml_path, "r") as file: + original_content_lines = file.readlines()[3:] + + return "".join(original_content_lines) + + def set_to_yaml(self, file_selection): + + commented_yaml = file_selection_to_commented_yaml(file_selection) + + self._write_yaml_with_header(commented_yaml) + + return + + def update_yaml_from_tracked_files(self): + """ + Updates the YAML file with the current list of tracked files. + """ + + tracked_files = self.repository.get_tracked_files() + + file_selection = self.get_from_yaml() + + # If there are no changes, do nothing + if set(tracked_files) == set( + file_selection.included_files + file_selection.excluded_files + ): + return + + new_included_files = list( + set(tracked_files) - set(file_selection.excluded_files) + ) + + self.set_to_yaml( + FileSelection(new_included_files, file_selection.excluded_files) + ) + + def get_from_yaml(self) -> FileSelection: + """ + Get selected file paths and excluded file paths from yaml + """ + + yaml_content = self._read_yaml_with_headers() + + try: + file_selection = commented_yaml_to_file_selection(yaml_content) + except: + print( + "Could not read the file selection from the YAML file. Attempting to fix with AI" + ) + print(yaml_content) + file_selection = fuzzy_parse_file_selection(self.ai, yaml_content) + self.set_to_yaml(file_selection) + + return file_selection + + def get_pretty_selected_from_yaml(self) -> str: + """ + Retrieves selected file paths from the YAML file and prints them in an ASCII-style tree structure. 
+ """ + # Get selected files from YAML + file_selection = self.get_from_yaml() + + # Helper function to insert a path into the tree dictionary + def insert_path(tree, path_parts): + # Recursively build nested dictionary from path parts + if not path_parts: + return + if path_parts[0] not in tree: + tree[path_parts[0]] = {} + insert_path(tree[path_parts[0]], path_parts[1:]) + + file_tree = {} + for filepath in file_selection.included_files: + parts = filepath.split("/") + insert_path(file_tree, parts) + + # Helper function to format the tree into a string with ASCII graphics + def format_tree(tree, prefix=""): + lines = [] + # Separate directories and files + directories = {k: v for k, v in tree.items() if v} + files = {k: v for k, v in tree.items() if not v} + # Sort items to keep alphabetical order, directories first + items = sorted(directories.items()) + sorted(files.items()) + for i, (key, sub_tree) in enumerate(items): + if i == len(items) - 1: # Last item uses └── + lines.append(prefix + "└── " + key) + extension = " " + else: + lines.append(prefix + "├── " + key) + extension = "│ " + if sub_tree: + lines.extend(format_tree(sub_tree, prefix=prefix + extension)) + return lines + + # Generate formatted tree lines + tree_lines = format_tree(file_tree) + + # Join lines and return as a string + return "\n".join(tree_lines) + + def open_yaml_in_editor(self): + """ + Opens the generated YAML file in the default system editor. + If the YAML file is empty or doesn't exist, generate it first. + """ + + # Platform-specific methods to open the file + if platform.system() == "Windows": + os.startfile(self.yaml_path) + elif platform.system() == "Darwin": + subprocess.run(["open", self.yaml_path]) + else: # Linux and other Unix-like systems + subprocess.run(["xdg-open", self.yaml_path]) + + def get_included_as_file_repository(self): + file_selection = self.get_from_yaml() + + return Files(self.project_path, file_selection.included_files) diff --git a/gpt_engineer/applications/interactive_cli_loop/files.py b/gpt_engineer/applications/interactive_cli_loop/files.py new file mode 100644 index 0000000000..b1e7347129 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/files.py @@ -0,0 +1,40 @@ +from pathlib import Path + +from gpt_engineer.core.files_dict import FilesDict + + +class Files(FilesDict): + def __init__(self, project_path: str, selected_files: list): + """ + Initialize the Files object by reading the content of the provided file paths. + + Parameters + ---------- + project_path : str + The base path of the project. + selected_files : list + List of file paths relative to the project path. 
+ """ + + self.project_path = project_path + # Convert the list of selected files and their relative directory into a dictionary of relative file paths + content_dict = {} + for file_path in selected_files: + try: + with open( + Path(project_path) / file_path, "r", encoding="utf-8" + ) as content: + content_dict[str(file_path)] = content.read() + except FileNotFoundError: + print(f"Warning: File not found {file_path}") + except UnicodeDecodeError: + print(f"Warning: File not UTF-8 encoded {file_path}, skipping") + super().__init__(content_dict) + + def write_to_disk(self, files: FilesDict): + for name, content in files.items(): + path = Path(self.project_path) / name + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "w") as f: + f.write(content) + return self diff --git a/gpt_engineer/applications/interactive_cli_loop/generation_tools.py b/gpt_engineer/applications/interactive_cli_loop/generation_tools.py new file mode 100644 index 0000000000..af6c959bef --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/generation_tools.py @@ -0,0 +1,483 @@ +import xml.etree.ElementTree as ET +import json + +from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection +from gpt_engineer.core.ai import AI + +from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler + + +def generate_branch_name(ai: AI, feature_description: str) -> str: + system_prompt = """ + You are a branch name autocomplete / suggestion tool. Based on the users input, please respond with a single suggestion of a branch name and notthing else. + + Example: + + Input: I want to add a login button + Output: feature/login-button + """ + + ai.llm.callbacks.clear() # silent + + messages = ai.start(system_prompt, feature_description, step_name="name-branch") + + ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + + return messages[-1].content.strip() + + +class TaskResponse: + def __init__(self, planning_thoughts, tasks, closing_remarks): + self.planning_thoughts = planning_thoughts + self.tasks = tasks + self.closing_remarks = closing_remarks + + def __str__(self): + return f"Planning Thoughts: {self.planning_thoughts}\nTasks: {'; '.join(self.tasks)}\nClosing Remarks: {self.closing_remarks}" + + +def parse_task_xml_to_class(xml_data): + # Parse the XML data + root = ET.fromstring(xml_data) + + # Extract the planning thoughts + planning_thoughts = root.find("PlanningThoughts").text.strip() + + # Extract tasks + tasks = [task.text.strip() for task in root.findall(".//Task")] + + # Extract closing remarks + closing_remarks = root.find("ClosingRemarks").text.strip() + + # Create an instance of the response class + response = TaskResponse(planning_thoughts, tasks, closing_remarks) + + return response + + +def build_git_context_string(git_context): + return f"""## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. +### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. +{git_context.branch_changes} + +### Staged Changes - this is the diff of the current staged changes. +{git_context.staged_changes}""" + + +def build_feature_context_string(feature, git_context): + feature_string = f"""## Feature - this is the description fo the current feature we are working on. +{feature.get_description()} + +## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. 
+{feature.get_progress()["done"]}
+"""
+
+    if git_context:
+        return f"""{feature_string}
+
+{build_git_context_string(git_context)}
+"""
+
+    return feature_string
+
+
+def build_files_context_string(feature, git_context, files):
+    return f"""{build_feature_context_string(feature, git_context)}
+
+## Current Codebase - this is the as is view of the current code base including any unstaged changes.
+{files.to_chat()}
+"""
+
+
+def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str:
+    system_prompt = """
+You are a software engineer work planning tool. Given a feature description, a list of tasks already completed, and sections of the code
+repository we are working on, suggest a list of implementation tasks to be done in order to move towards the end goal of completing the feature.
+
+An implementation task consists of actually writing some code - and doesn't include review or research tasks, or any other activity other than writing code.
+
+First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what implementation tasks are left to be done.
+
+Then output the list of between 0 and 3 implementation tasks to be done which get us closer to our goal. Please try to keep the tasks small, actionable and independently committable.
+
+We only need to move towards our goal with these tasks, we don't have to complete the feature in these 3 steps.
+
+The output format will be XML as follows:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Respond in XML and nothing else.
+
+You may send as few as 0 tasks and as many as 3. If you believe the feature is complete, send 0 tasks.
+"""
+
+    input = build_files_context_string(feature, git_context, files)
+
+    ai.llm.callbacks.clear()  # silent
+
+    messages = ai.start(system_prompt, input, step_name="suggest-tasks")
+
+    ai.llm.callbacks.append(StreamingStdOutCallbackHandler())
+
+    xml = messages[-1].content.strip()
+
+    return parse_task_xml_to_class(xml)
+
+
+def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection:
+    # todo: load prompt from prompts/fuzzy_file_parser
+
+    system_prompt = """## Explanation
+You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid.
+
+A user has been given a yaml representation of a file structure, represented like so:
+
+.github:
+  ISSUE_TEMPLATE:
+  - bug-report.md
+  - documentation-clarification.md
+  - feature-request.md
+  PULL_REQUEST_TEMPLATE:
+  - PULL_REQUEST_TEMPLATE.md
+  workflows:
+  - automation.yml
+  - ci.yaml
+  - pre-commit.yaml
+  - release.yaml
+  (./):
+  - CODEOWNERS
+  - CODE_OF_CONDUCT.md
+  - CONTRIBUTING.md
+  - FUNDING.yml
+
+Folders are represented as keys in a dictionary, files are items in a list. Any files listed under the (./) key can be assumed to be files of the folder above that.
+ +The given example maps to these file paths: + +".github/ISSUE_TEMPLATE/bug-report.md", +".github/ISSUE_TEMPLATE/documentation-clarification.md", +".github/ISSUE_TEMPLATE/feature-request.md", +".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", +".github/workflows/automation.yml", +".github/workflows/ci.yaml", +".github/workflows/pre-commit.yaml", +".github/workflows/release.yaml", +".github/CODEOWNERS", +".github/CODE_OF_CONDUCT.md", +".github/CONTRIBUTING.md", +".github/FUNDING.yml", + +An example of the yaml file after commenting might be something like this: + + +.github: + # ISSUE_TEMPLATE: + # - bug-report.md + # - documentation-clarification.md + # - feature-request.md + # PULL_REQUEST_TEMPLATE: + # - PULL_REQUEST_TEMPLATE.md + workflows: + - automation.yml + - ci.yaml + - pre-commit.yaml + - release.yaml + # (./): + # - CODEOWNERS + - CODE_OF_CONDUCT.md + - CONTRIBUTING.md + # - FUNDING.yml + + +This would convert into: + +{ + "included_files": [ + ".github/workflows/automation.yml", + ".github/workflows/ci.yaml", + ".github/workflows/pre-commit.yaml", + ".github/workflows/release.yaml", + ".github/CODE_OF_CONDUCT.md", + ".github/CONTRIBUTING.md" + ], + "excluded_files": [ + ".github/ISSUE_TEMPLATE/bug-report.md", + ".github/ISSUE_TEMPLATE/documentation-clarification.md", + ".github/ISSUE_TEMPLATE/feature-request.md", + ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", + ".github/CODEOWNERS", + ".github/FUNDING.yml" + ] +} + + +Although the commmented content wasnt strictly correct yaml, their intentions were clear. They wanted to retain the files in the workflow folder aswell as the code of conduct and contributing guides + +Based on commented yaml inputs such as this, your job is to output JSON, indicating which files have been included and which have been excluded. + +Excluded files are always commented out with a # like in the above example. + +The json you should return will be like this: + +{ + "included_files": [ + "folder1/file5", + "folder1/folder3/file3", + "file7" + ], + "excluded_files": [ + "folder1/folder2/file1", + "folder1/folder2/file2", + "folder1/folder3/file4", + "folder1/file5", + ] +} + +Files can only be included or excluded, not both. If you are confused about the state of a file make your best guess - and if you really arent sure then mark it as included. + +Respond in JSON and nothing else. 
+ +## Examples + +Example 1: + +Input: + +.github: + ISSUE_TEMPLATE: + - bug_report.md + - feature_request.md + PULL_REQUEST_TEMPLATE: + - pull_request_template.md + # workflows: + # - ci.yml + # - release.yml + +Output: + +{ + "included_files": [ + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/PULL_REQUEST_TEMPLATE/pull_request_template.md" + ], + "excluded_files": [ + ".github/workflows/ci.yml", + ".github/workflows/release.yml" + ] +} + +Example 2: + +Input: + +source: + # controllers: + # - MainController.cs + # - AuthController.cs + models: + - User.cs + - Post.cs + views: + Home: + - Index.cshtml + # - About.cshtml + Auth: + - Login.cshtml + - Register.cshtml + (./): + - Dockerfile + +Output: + +{ + "included_files": [ + "source/models/User.cs", + "source/models/Post.cs", + "source/views/Home/Index.cshtml", + "source/views/Auth/Login.cshtml", + "source/views/Auth/Register.cshtml" + "source/Dockerfile", + ], + "excluded_files": [ + "source/controllers/MainController.cs", + "source/controllers/AuthController.cs", + "source/views/Home/About.cshtml" + ] +} + +Example 3: + +Input: + +src: + main: + java: + com: + example: + # controllers: + # - UserController.java + # - PostController.java + models: + - User.java + - Post.java + # repositories: + # - UserRepository.java + # - PostRepository.java + services: + - UserService.java + - PostService.java + resources: + - application.properties + test: + java: + com: + example: + controllers: + - UserControllerTest.java + - PostControllerTest.java + (./): + - pom.xml + - Dockerfile + +Output: + +{ + "included_files": [ + "src/main/java/com/example/models/User.java", + "src/main/java/com/example/models/Post.java", + "src/main/java/com/example/services/UserService.java", + "src/main/java/com/example/services/PostService.java", + "src/main/resources/application.properties", + "src/test/java/com/example/controllers/UserControllerTest.java", + "src/test/java/com/example/controllers/PostControllerTest.java", + "pom.xml", + "Dockerfile" + ], + "excluded_files": [ + "src/main/java/com/example/controllers/UserController.java", + "src/main/java/com/example/controllers/PostController.java", + "src/main/java/com/example/repositories/UserRepository.java", + "src/main/java/com/example/repositories/PostRepository.java" + ] +} + +Example 4: + +Input: + + +app: + # controllers: + # - application_controller.rb + # - users_controller.rb + models: + - user.rb + - post.rb + views: + layouts: + - application.html.erb + users: + - index.html.erb + - show.html.erb + posts: + - index.html.erb + # - show.html.erb + (./): + - Gemfile + - config +config: + environments: + - development.rb + - test.rb + # - production.rb + initializers: + - application_controller_renderer.rb + locales: + - en.yml + # routes.rb +db: + migrate: + - 20211025120523_create_users.rb + - 20211025120530_create_posts.rb +test: + fixtures: + - users.yml + - posts.yml + # controllers: + # - users_controller_test.rb + # - posts_controller_test.rb + models: + - user_test.rb + - post_test.rb + + +Output: + +{ + "included_files": [ + "app/models/user.rb", + "app/models/post.rb", + "app/views/layouts/application.html.erb", + "app/views/users/index.html.erb", + "app/views/users/show.html.erb", + "app/views/posts/index.html.erb", + "app/Gemfile", + "config/environments/development.rb", + "config/environments/test.rb", + "config/initializers/application_controller_renderer.rb", + "config/locales/en.yml", + "db/migrate/20211025120523_create_users.rb", + 
"db/migrate/20211025120530_create_posts.rb", + "test/fixtures/users.yml", + "test/fixtures/posts.yml", + "test/models/user_test.rb", + "test/models/post_test.rb" + ], + "excluded_files": [ + "app/controllers/application_controller.rb", + "app/controllers/users_controller.rb", + "app/views/posts/show.html.erb", + "config/environments/production.rb", + "config/routes.rb", + "test/controllers/users_controller_test.rb", + "test/controllers/posts_controller_test.rb" + ] +} + +## IMPORTANT +Remember any line that is commented is an excluded file. Any line that is NOT commented - is an included file. +""" + + # ai.llm.callbacks.clear() # silent + + messages = ai.start(system_prompt, yaml_string, step_name="fuzzy-parse-yaml") + + # ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) + + json_string = messages[-1].content.strip() + + # strip anything before first { and after last } + json_string = json_string[json_string.find("{") : json_string.rfind("}") + 1] + + data = json.loads(json_string) + + return FileSelection(data["included_files"], data["excluded_files"]) diff --git a/gpt_engineer/applications/interactive_cli_loop/main.py b/gpt_engineer/applications/interactive_cli_loop/main.py new file mode 100644 index 0000000000..0a92d843a2 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/main.py @@ -0,0 +1,147 @@ +import typer +from dotenv import load_dotenv +from pathlib import Path +from gpt_engineer.core.default.paths import memory_path + +from gpt_engineer.applications.interactive_cli_loop.agents.task_agent import TaskAgent +from gpt_engineer.applications.interactive_cli_loop.agents.feature_agent import ( + FeatureAgent, +) +from gpt_engineer.applications.interactive_cli_loop.agents.chat_agent import ChatAgent +from gpt_engineer.applications.interactive_cli_loop.feature import Feature +from gpt_engineer.applications.interactive_cli_loop.task import Task +from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.interactive_cli_loop.domain import Settings +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector + + +from gpt_engineer.core.ai import AI + +app = typer.Typer() + + +@app.command() +def feature( + new: bool = typer.Option(False, "--new", "-n", help="Initialize a new feature."), + project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), + model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), + temperature: float = typer.Option( + 0.1, + "--temperature", + "-t", + help="Controls randomness: lower values for more focused, deterministic outputs.", + ), + azure_endpoint: str = typer.Option( + "", + "--azure", + "-a", + help="""Endpoint for your Azure OpenAI Service (https://xx.openai.azure.com). + In that case, the given model is the deployment name chosen in the Azure AI Studio.""", + ), + verbose: bool = typer.Option( + False, "--verbose", "-v", help="Enable verbose logging for debugging." + ), + debug: bool = typer.Option( + False, "--debug", "-d", help="Enable debug mode for debugging." + ), + no_branch: bool = typer.Option( + False, + "--no-branch", + "-nb", + help="Do not create a new feature branch for this work.", + ), +): + """ + Handle features in the project. 
+ """ + load_dotenv() + + ai = AI( + model_name=model, + temperature=temperature, + ) + + repository = Repository(project_path) + + feature = Feature(project_path, repository) + + file_selector = FileSelector(project_path, repository) + + agent = FeatureAgent(ai, project_path, feature, repository, file_selector) + + settings = Settings(no_branch) + + if new: + agent.init(settings) + else: + agent.resume(settings) + + +@app.command() +def chat( + project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), + model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), + temperature: float = typer.Option( + 0.8, + "--temperature", + "-t", + help="Controls randomness: lower values for more focused, deterministic outputs.", + ), +): + """ + Initiate a chat about the current repository and feature context + """ + ai = AI( + model_name=model, + temperature=temperature, + ) + + repository = Repository(project_path) + + feature = Feature(project_path, repository) + + file_selector = FileSelector(project_path, repository) + + chat_agent = ChatAgent(ai, project_path, feature, repository, file_selector) + + chat_agent.start() + + +if __name__ == "__main__": + app() + + +@app.command() +def task( + project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), + model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), + temperature: float = typer.Option( + 0.1, + "--temperature", + "-t", + help="Controls randomness: lower values for more focused, deterministic outputs.", + ), +): + """ + Implement a simple one off task without feature context + """ + load_dotenv() + + ai = AI( + model_name=model, + temperature=temperature, + ) + + repository = Repository(project_path) + + task = Task(project_path) + + file_selector = FileSelector(project_path, repository) + + task_agent = TaskAgent(ai, project_path, task, repository, file_selector) + + task_agent.run() + + # review + + # task.delete() diff --git a/gpt_engineer/applications/interactive_cli_loop/prompts/__init__.py b/gpt_engineer/applications/interactive_cli_loop/prompts/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser b/gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser new file mode 100644 index 0000000000..75200759b1 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser @@ -0,0 +1,320 @@ +## Explanation +You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. + +A user has been given a yaml representation of a file structure, represented like so: + +.github: + ISSUE_TEMPLATE: + - bug-report.md + - documentation-clarification.md + - feature-request.md + PULL_REQUEST_TEMPLATE: + - PULL_REQUEST_TEMPLATE.md + workflows: + - automation.yml + - ci.yaml + - pre-commit.yaml + - release.yaml + (./): + - CODEOWNERS + - CODE_OF_CONDUCT.md + - CONTRIBUTING.md + - FUNDING.yml + +Folders are represented as keys in a dictionary, files are items in a list. Any files listed under the (./) key can be assumed to be files of the folder above that. 
+
+The given example maps to these file paths:
+
+".github/ISSUE_TEMPLATE/bug-report.md",
+".github/ISSUE_TEMPLATE/documentation-clarification.md",
+".github/ISSUE_TEMPLATE/feature-request.md",
+".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md",
+".github/workflows/automation.yml",
+".github/workflows/ci.yaml",
+".github/workflows/pre-commit.yaml",
+".github/workflows/release.yaml",
+".github/CODEOWNERS",
+".github/CODE_OF_CONDUCT.md",
+".github/CONTRIBUTING.md",
+".github/FUNDING.yml",
+
+An example of the yaml file after commenting might be something like this:
+
+
+.github:
+  # ISSUE_TEMPLATE:
+  #   - bug-report.md
+  #   - documentation-clarification.md
+  #   - feature-request.md
+  # PULL_REQUEST_TEMPLATE:
+  #   - PULL_REQUEST_TEMPLATE.md
+  workflows:
+    - automation.yml
+    - ci.yaml
+    - pre-commit.yaml
+    - release.yaml
+  # (./):
+  #   - CODEOWNERS
+    - CODE_OF_CONDUCT.md
+    - CONTRIBUTING.md
+  #   - FUNDING.yml
+
+
+This would convert into:
+
+{
+    "included_files": [
+        ".github/workflows/automation.yml",
+        ".github/workflows/ci.yaml",
+        ".github/workflows/pre-commit.yaml",
+        ".github/workflows/release.yaml",
+        ".github/CODE_OF_CONDUCT.md",
+        ".github/CONTRIBUTING.md"
+    ],
+    "excluded_files": [
+        ".github/ISSUE_TEMPLATE/bug-report.md",
+        ".github/ISSUE_TEMPLATE/documentation-clarification.md",
+        ".github/ISSUE_TEMPLATE/feature-request.md",
+        ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md",
+        ".github/CODEOWNERS",
+        ".github/FUNDING.yml"
+    ]
+}
+
+
+Although the commented content wasn't strictly correct yaml, the intention was clear: retain the files in the workflows folder as well as the code of conduct and contributing guides.
+
+Based on commented yaml inputs such as this, your job is to output JSON, indicating which files have been included and which have been excluded.
+
+Excluded files are always commented out with a # like in the above example.
+
+The json you should return will be like this:
+
+{
+    "included_files": [
+        "folder1/file5",
+        "folder1/folder3/file3",
+        "file7"
+    ],
+    "excluded_files": [
+        "folder1/folder2/file1",
+        "folder1/folder2/file2",
+        "folder1/folder3/file4"
+    ]
+}
+
+Files can only be included or excluded, not both. If you are confused about the state of a file, make your best guess - and if you really aren't sure then mark it as included.
+
+Respond in JSON and nothing else.
+ +## Examples + +Example 1: + +Input: + +.github: + ISSUE_TEMPLATE: + - bug_report.md + - feature_request.md + PULL_REQUEST_TEMPLATE: + - pull_request_template.md + # workflows: + # - ci.yml + # - release.yml + +Output: + +{ + "included_files": [ + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/PULL_REQUEST_TEMPLATE/pull_request_template.md" + ], + "excluded_files": [ + ".github/workflows/ci.yml", + ".github/workflows/release.yml" + ] +} + +Example 2: + +Input: + +source: + # controllers: + # - MainController.cs + # - AuthController.cs + models: + - User.cs + - Post.cs + views: + Home: + - Index.cshtml + # - About.cshtml + Auth: + - Login.cshtml + - Register.cshtml + (./): + - Dockerfile + +Output: + +{ + "included_files": [ + "source/models/User.cs", + "source/models/Post.cs", + "source/views/Home/Index.cshtml", + "source/views/Auth/Login.cshtml", + "source/views/Auth/Register.cshtml" + "source/Dockerfile", + ], + "excluded_files": [ + "source/controllers/MainController.cs", + "source/controllers/AuthController.cs", + "source/views/Home/About.cshtml" + ] +} + +Example 3: + +Input: + +src: + main: + java: + com: + example: + # controllers: + # - UserController.java + # - PostController.java + models: + - User.java + - Post.java + # repositories: + # - UserRepository.java + # - PostRepository.java + services: + - UserService.java + - PostService.java + resources: + - application.properties + test: + java: + com: + example: + controllers: + - UserControllerTest.java + - PostControllerTest.java + (./): + - pom.xml + - Dockerfile + +Output: + +{ + "included_files": [ + "src/main/java/com/example/models/User.java", + "src/main/java/com/example/models/Post.java", + "src/main/java/com/example/services/UserService.java", + "src/main/java/com/example/services/PostService.java", + "src/main/resources/application.properties", + "src/test/java/com/example/controllers/UserControllerTest.java", + "src/test/java/com/example/controllers/PostControllerTest.java", + "pom.xml", + "Dockerfile" + ], + "excluded_files": [ + "src/main/java/com/example/controllers/UserController.java", + "src/main/java/com/example/controllers/PostController.java", + "src/main/java/com/example/repositories/UserRepository.java", + "src/main/java/com/example/repositories/PostRepository.java" + ] +} + +Example 4: + +Input: + + +app: + # controllers: + # - application_controller.rb + # - users_controller.rb + models: + - user.rb + - post.rb + views: + layouts: + - application.html.erb + users: + - index.html.erb + - show.html.erb + posts: + - index.html.erb + # - show.html.erb + (./): + - Gemfile + - config +config: + environments: + - development.rb + - test.rb + # - production.rb + initializers: + - application_controller_renderer.rb + locales: + - en.yml + # routes.rb +db: + migrate: + - 20211025120523_create_users.rb + - 20211025120530_create_posts.rb +test: + fixtures: + - users.yml + - posts.yml + # controllers: + # - users_controller_test.rb + # - posts_controller_test.rb + models: + - user_test.rb + - post_test.rb + + +Output: + +{ + "included_files": [ + "app/models/user.rb", + "app/models/post.rb", + "app/views/layouts/application.html.erb", + "app/views/users/index.html.erb", + "app/views/users/show.html.erb", + "app/views/posts/index.html.erb", + "app/Gemfile", + "config/environments/development.rb", + "config/environments/test.rb", + "config/initializers/application_controller_renderer.rb", + "config/locales/en.yml", + "db/migrate/20211025120523_create_users.rb", + 
"db/migrate/20211025120530_create_posts.rb", + "test/fixtures/users.yml", + "test/fixtures/posts.yml", + "test/models/user_test.rb", + "test/models/post_test.rb" + ], + "excluded_files": [ + "app/controllers/application_controller.rb", + "app/controllers/users_controller.rb", + "app/views/posts/show.html.erb", + "config/environments/production.rb", + "config/routes.rb", + "test/controllers/users_controller_test.rb", + "test/controllers/posts_controller_test.rb" + ] +} + +## IMPORTANT +Remember any line that is commented is an excluded file. Any line that is NOT commented - is an included file. \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli_loop/repository.py b/gpt_engineer/applications/interactive_cli_loop/repository.py new file mode 100644 index 0000000000..f6d7d9c054 --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/repository.py @@ -0,0 +1,151 @@ +from dataclasses import dataclass +from typing import List + +from git import GitCommandError, Repo + + +@dataclass +class Commit: + """ + Represents a single Git commit with a description and a diff. + """ + + description: str + diff: str + + def __str__(self) -> str: + diff_str = "\n".join(str(d) for d in self.diff) + return f"Commit Description: {self.description}\nDiff:\n{diff_str}" + + +@dataclass +class GitContext: + """ + Represents the Git context of an in progress feature. + """ + + commits: List[Commit] + branch_changes: str + staged_changes: str + unstaged_changes: str + tracked_files: List[str] + + +class Repository: + """ + Manages a git repository, providing functionalities to get repo status, + list files considering .gitignore, and interact with repository history. + """ + + def __init__(self, repo_path: str): + self.repo_path = repo_path + self.repo = Repo(repo_path) + assert not self.repo.bare + + def get_tracked_files(self) -> List[str]: + """ + List all files that are currently tracked by Git in the repository. + """ + try: + tracked_files = self.repo.git.ls_files().split("\n") + return tracked_files + except GitCommandError as e: + print(f"Error listing tracked files: {e}") + return [] + + def get_feature_branch_diff(self): + """ + Get a consolidated diff for the entire feature branch from its divergence point. + + Returns: + - str: The diff representing all changes from the feature branch since its divergence. + """ + current_branch = self.repo.active_branch + + # Get the tracking branch (e.g., 'origin/master') + tracking_branch = current_branch.tracking_branch() + if tracking_branch is None: + print("No tracking branch set, using 'master' as default base branch.") + tracking_branch = self.repo.heads.master # Fallback to 'master' + + try: + # Find the merge base between the current branch and the tracking branch or master + merge_base = self.repo.merge_base(tracking_branch, current_branch) + if merge_base: + merge_base = merge_base[ + 0 + ] # GitPython might return a list of merge bases + + # Generate the diff from the merge base to the latest commit of the feature branch + feature_diff = self.repo.git.diff( + f"{merge_base}..{current_branch}", unified=0 + ) + return feature_diff + except GitCommandError as e: + print(f"Error generating diff: {e}") + return "" + + def get_unstaged_changes(self): + """ + Get the unstaged changes in the repository. + + Returns + ------- + str + The unstaged changes in the repository. 
+ """ + return self.repo.git.diff() + + def get_git_context(self): + staged_changes = self.repo.git.diff("--cached") + unstaged_changes = self.repo.git.diff() + current_branch = self.repo.active_branch + + commits = list(self.repo.iter_commits(rev=current_branch.name)) + + commit_objects = [ + Commit( + commit.summary, + ( + commit.diff(commit.parents[0], create_patch=True) + if commit.parents + else commit.diff(None, create_patch=True) + ), + ) + for commit in commits + ] + + branch_changes = self.get_feature_branch_diff() + + tracked_files = self.get_tracked_files() + + return GitContext( + commit_objects, + branch_changes, + staged_changes, + unstaged_changes, + tracked_files, + ) + + def create_branch(self, branch_name): + """ + Create a new branch in the repository. + + Parameters + ---------- + branch_name : str + The name of the new branch. + """ + self.repo.git.checkout("-b", branch_name) + + def stage_all_changes(self): + """ + Stage all changes in the repository. + """ + self.repo.git.add("--all") + + def undo_unstaged_changes(self): + """ + Undo all unstaged changes in the repository. + """ + self.repo.git.checkout("--", ".") diff --git a/gpt_engineer/applications/interactive_cli_loop/task.py b/gpt_engineer/applications/interactive_cli_loop/task.py new file mode 100644 index 0000000000..a4f46aa89c --- /dev/null +++ b/gpt_engineer/applications/interactive_cli_loop/task.py @@ -0,0 +1,80 @@ +import json +import os +import platform +import subprocess +import shutil +from pathlib import Path +from typing import Union + +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import memory_path +from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli_loop.repository import Repository + + +class Task(DiskMemory): + """ + Represents a task that will be done one off without the wider context of a feature + """ + + def __init__(self, project_path: Union[str, Path]): + + self._task_path = Path(memory_path(project_path)) / "task" + self.path = self._task_path + self._task_filename = "task.md" + self._files_filename = "files.yml" + + if not os.path.exists(self._task_path): + os.makedirs(self._task_path) + + self.set_task("Please replace with task description") + + super().__init__(self._task_path) + + def delete(self): + shutil.rmtree(self._task_path) + + def set_task(self, task: str): + """ + Updates the task file with new text. + + Parameters + ---------- + task : str + The new task to write to the feature file. + """ + super().__setitem__(self._task_filename, task) + + def get_task(self) -> str: + """ + Retrieve the content of the task file in the database. + + Returns + ------- + str + The content of the feature file. + """ + return super().__getitem__(self._task_filename) + + def _file_path(self, filename): + return self._task_path / filename + + def _open_file_in_editor(self, path): + """ + Opens the generated YAML file in the default system editor. + If the YAML file is empty or doesn't exist, generate it first. + """ + + # Platform-specific methods to open the file + if platform.system() == "Windows": + os.startfile(path) + elif platform.system() == "Darwin": + subprocess.run(["open", path]) + else: # Linux and other Unix-like systems + subprocess.run(["xdg-open", path]) + + def open_task_in_editor(self): + """ + Opens the task file in the default system editor. 
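For orientation, a small usage sketch of the Repository helper added above. It only calls methods visible in this diff, assumes it is run from inside a non-bare git checkout, and uses the interactive_cli_loop import path that exists at this point in the series:

from gpt_engineer.applications.interactive_cli_loop.repository import Repository

# Point the helper at an existing, non-bare git checkout.
repo = Repository(".")

# One object bundling everything the agents ask about the working tree.
ctx = repo.get_git_context()
print(f"{len(ctx.tracked_files)} tracked files")
print("unstaged changes present" if ctx.unstaged_changes else "working tree clean")

# Diff of the feature branch since its merge base with the tracking branch
# (falls back to master when no tracking branch is set).
print(repo.get_feature_branch_diff()[:500])

GitContext simply bundles these values so the agent steps can pass them around as a single object.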
+ """ + self._open_file_in_editor(self._file_path(self._task_filename)) diff --git a/tests/applications/interactive_cli/test_file_selection.py b/tests/applications/interactive_cli/test_file_selection.py index a9777c64cf..19a655c191 100644 --- a/tests/applications/interactive_cli/test_file_selection.py +++ b/tests/applications/interactive_cli/test_file_selection.py @@ -4,7 +4,7 @@ from gpt_engineer.core.ai import AI -from gpt_engineer.applications.interactive_cli.file_selection import ( +from gpt_engineer.applications.interactive_cli_loop.file_selection import ( FileSelection, paths_to_tree, tree_to_paths, @@ -13,7 +13,7 @@ commented_yaml_to_file_selection, ) -from gpt_engineer.applications.interactive_cli.generation_tools import ( +from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( fuzzy_parse_file_selection, ) From 319990a06c2fad5078aa1936444ef6192b3f378e Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 10 Jun 2024 22:42:19 +0100 Subject: [PATCH 32/36] working nicely --- .../interactive_cli/agents/agent_steps.py | 383 ++++++++++++++---- .../interactive_cli/agents/chat_agent.py | 12 +- .../interactive_cli/agents/feature_agent.py | 116 +++--- .../interactive_cli/agents/task_agent.py | 94 ----- .../applications/interactive_cli/feature.py | 11 +- .../interactive_cli/generation_tools.py | 15 +- .../applications/interactive_cli/main.py | 47 +-- 7 files changed, 421 insertions(+), 257 deletions(-) delete mode 100644 gpt_engineer/applications/interactive_cli/agents/task_agent.py diff --git a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py index 3d9d2d25b7..e4e268e9ec 100644 --- a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/interactive_cli/agents/agent_steps.py @@ -1,8 +1,10 @@ -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.repository import ( + Repository, + GitContext, +) +from gpt_engineer.applications.interactive_cli.generation_tools import ( generate_branch_name, build_feature_context_string, generate_suggested_tasks, @@ -15,12 +17,16 @@ from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path from gpt_engineer.core.preprompts_holder import PrepromptsHolder -from prompt_toolkit import prompt as cli_input +from prompt_toolkit import ( + prompt as cli_input, + PromptSession as InputSession, + HTML, + print_formatted_text, +) from prompt_toolkit.validation import ValidationError, Validator -from prompt_toolkit import PromptSession as InputSession -from prompt_toolkit.formatted_text import FormattedText from prompt_toolkit.completion import WordCompleter + from yaspin import yaspin @@ -34,14 +40,88 @@ def validate(self, document): ) -def initialize_new_feature( - ai: AI, feature: Feature, repository: Repository, no_branch: bool -): +def print_feature_state(feature, file_selector): + + if not feature.has_description(): + output = "No active feature." 
+ else: + feature_description = feature.get_description() + file_string = file_selector.get_pretty_selected_from_yaml() + completed_tasks_string = "None" + active_task_string = "None" + + completed_tasks = feature.get_progress()["done"] + + if completed_tasks and len(completed_tasks) > 0: + completed_tasks_string = "\n".join( + [f"• {task}" for task in completed_tasks] + ) + + if feature.has_task(): + active_task_string = feature.get_task() + + output = f""" +--- + +Active Feature + +{feature_description} + +File Selection + +{file_string} + +Completed Tasks + +{completed_tasks_string} + +Active Task + +{active_task_string} + +--- +""" + + print_formatted_text(HTML(output)) + + +def select_create_branch(): + completer = WordCompleter(["1", "2", "x"], ignore_case=True) + session = InputSession() + + # Using prompt to get user input + result = session.prompt( + """Would you like to + +1 - Initialize new feature (on new branch) +2 - Initialize new feature (on current branch) + +x - Exit + +""", + completer=completer, + ).lower() + + print() + + if result == "1": + return True + if result == "2": + return False + if result == "x": + print("Exiting...") + return + + +def initialize_new_feature(ai: AI, feature: Feature, repository: Repository): + + create__branch = select_create_branch() + feature.clear_feature() update_feature_description(feature) - if not no_branch: + if create__branch: print("Creating feature branch... (this can be disabled with -nb setting)") branch_name = generate_branch_name(ai, feature.get_description()) @@ -62,15 +142,84 @@ def update_user_file_selection(file_selector: FileSelector): def update_feature_description(feature: Feature): feature.open_feature_in_editor() - input("Please edit the feature file and then press Enter to continue...") + input("\nPlease edit the feature file and then press Enter to continue...") def update_task_description(feature: Feature): feature.open_task_in_editor() - input("Please edit the task file and then press Enter to continue...") + input("\nPlease edit the task file and then press Enter to continue...") + + +def update_feature(feature: Feature, file_selector: FileSelector): + completer = WordCompleter(["1", "2", "3", "x"], ignore_case=True) + session = InputSession() + + result = session.prompt( + HTML( + """ +Would you like to: + +1 - Edit Feature Description +2 - Edit File Selection +3 - Finish/Deactivate Feature + +x - Exit + +""" + ), + completer=completer, + ).lower() + + print() + + if result == "1": + update_feature_description(feature) + if result == "2": + update_user_file_selection(file_selector) + if result == "3": + print("Sorry! Not implemented yet.") + if result == "x": + print("Exiting...") + return def initiate_new_task(ai, feature, git_context, file_selector): + """ + Runs a flow which ends in the user saving a new task in the task.md file + """ + + completer = WordCompleter(["1", "2", "3", "x"], ignore_case=True) + session = InputSession() + + result = session.prompt( + HTML( + """ +No active task... 
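The agent steps in this file repeat one prompt_toolkit idiom: a WordCompleter over a fixed set of single-key options plus a PromptSession that renders the menu text as HTML. A stripped-down sketch of that idiom on its own, independent of this patch:

from prompt_toolkit import HTML, PromptSession
from prompt_toolkit.completion import WordCompleter


def ask(menu_text, options):
    # Offer a fixed set of single-key options and return the user's choice.
    session = PromptSession()
    completer = WordCompleter(options, ignore_case=True)
    return session.prompt(HTML(menu_text), completer=completer).lower()


if __name__ == "__main__":
    choice = ask("1 - Continue\nx - Exit\n\n", ["1", "x"])
    print("continuing" if choice == "1" else "exiting")

The real steps then branch on the returned key, as the functions above and below do.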
+ +Would you like to: + +1 - Suggest New Tasks (Recommended) +2 - New Custom Task + +x - Exit + +""" + ), + completer=completer, + ).lower() + + print() + + if result == "1": + suggest_new_tasks(ai, feature, git_context, file_selector) + elif result == "2": + update_task_description(feature) + elif result == "x": + print("Exiting...") + return + + +def suggest_new_tasks(ai, feature, git_context, file_selector): files = file_selector.get_included_as_file_repository() @@ -87,42 +236,132 @@ def initiate_new_task(ai, feature, git_context, file_selector): options = [str(i + 1) for i in range(max_tasks)] + ["c"] completer = WordCompleter(options, ignore_case=True) - task_list_message = "\n".join([f"{i + 1}: {tasks[i]}" for i in range(max_tasks)]) + task_list_message = "\n".join( + [f"{i + 1}: {tasks[i]}" for i in range(max_tasks)] + ) def get_prompt(): - return [ - ("class:text", response.planning_thoughts), - ( - "class:text", - "\n\nWould you like to work on one of these suggested tasks or choose your own?\n", - ), - ("class:text", task_list_message), - ("class:text", "\nc: Custom task\n"), - ] + return f""" +AI Reasoning +{response.planning_thoughts} + +Which task would you like to you like to work on? + +{task_list_message} + +c: Custom task + +x: Exit + +""" session = InputSession() - result = session.prompt(FormattedText(get_prompt()), completer=completer).lower() + result = session.prompt(HTML(get_prompt()), completer=completer).lower() + + print() if result in options[:-1]: selected_task = tasks[int(result) - 1] - print(f"Selected task: {selected_task}") feature.set_task(selected_task) if result == "c": update_task_description(feature) + task = feature.get_task() + + print_formatted_text( + HTML( + f"""--- + +Active Task + +{task} + +--- +""" + ) + ) + + +def check_existing_task(feature, file_selector): + completer = WordCompleter(["1", "2", "3", "x"], ignore_case=True) + session = InputSession() + + result = session.prompt( + HTML( + """You have an existing task present + +Would you like to: + +1 - Implement task +2 - Mark task as complete +3 - Discard task and continue + +x - Exit + +""" + ), + completer=completer, + ).lower() + + print() + + if result == "1": + return True + if result == "2": + complete_task(feature, file_selector) + return False + if result == "3": + feature.set_task("") + return True + if result == "x": + print("Exiting...") + return False + + return False + def check_for_unstaged_changes( repository: Repository, ): unstaged_changes = repository.get_unstaged_changes() - if unstaged_changes: - if input( - "Unstaged changes present are you sure you want to proceed? y/n: " - ).lower() not in ["", "y", "yes"]: - print("Ok, not proceeding.") - return + if not unstaged_changes: + return True + + completer = WordCompleter(["1", "2", "3", "x"], ignore_case=True) + session = InputSession() + + result = session.prompt( + HTML( + """Unstaged changes present... 
+ +Would you like to: + +1 - Stage changes and continue +2 - Undo changes and continue +3 - Continue with unstaged changes + +x - Exit + +""" + ), + completer=completer, + ).lower() + + print() + + if result == "1": + repository.stage_all_changes() + if result == "2": + repository.undo_unstaged_changes() + if result == "3": + return True + if result == "x": + print("Exiting...") + return False + + return True def confirm_feature_context_and_task_with_user( @@ -159,10 +398,10 @@ def adjust_prompt_files(): input("Please edit the prompt files and then press Enter to continue...") -def run_task_loop( +def generate_code_for_task( project_path, feature: Feature, - repository: Repository, + git_context: GitContext, ai: AI, file_selector: FileSelector, ): @@ -170,7 +409,7 @@ def run_task_loop( memory = DiskMemory(memory_path(project_path)) preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) - context_string = build_feature_context_string(feature, repository.get_git_context()) + context_string = build_feature_context_string(feature, git_context) feature_agent_context = f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothing more. @@ -186,15 +425,12 @@ def run_task_loop( ai, prompt, files, memory, preprompts_holder, feature_agent_context ) - print("\n---- begining code generation ----\n") - # Creates loop + print_formatted_text("\n---- Beginning code generation ----\n") updated_files_dictionary = handle_improve_mode(improve_lambda, memory) - print("\n---- ending code generation ----\n") + print("\n---- Ending code generation ----\n") files.write_to_disk(updated_files_dictionary) - review_changes(project_path, feature, repository, ai, file_selector) - def run_adjust_loop(feature, file_selector): implement = confirm_feature_context_and_task_with_user(feature, file_selector) @@ -204,25 +440,10 @@ def run_adjust_loop(feature, file_selector): implement = confirm_feature_context_and_task_with_user(feature, file_selector) -def run_task(repository, project_path, feature, ai, file_selector): - print("Rerunning generation...") - check_for_unstaged_changes(repository) - run_task_loop(project_path, feature, repository, ai, file_selector) - - -def complete_task(repository, project_path, feature, ai, file_selector): - print("Completing task... ") - repository.stage_all_changes() +def complete_task(feature, file_selector): feature.complete_task() file_selector.update_yaml_from_tracked_files() - git_context = repository.get_git_context() - - print("Continuing with next task...") - initiate_new_task(ai, feature, git_context, file_selector) - - check_for_unstaged_changes(repository) - - run_task_loop(project_path, feature, repository, ai, file_selector) + print_formatted_text(HTML("Task Completed\n")) def review_changes( @@ -233,24 +454,50 @@ def review_changes( file_selector: FileSelector, ): - completer = WordCompleter(["r", "c", "u"], ignore_case=True) + completer = WordCompleter(["1", "2", "3", "4", "5", "x"], ignore_case=True) session = InputSession() - # Using prompt to get user input result = session.prompt( - """Please review the unstaged changes generated by GPT Engineer.. + HTML( + """Code generation for task complete -r: Retry the task (incorporating changes to prompt files) -c: Complete task and stage changes -x: Exit -""", +Important: Please review and edit the unstaged changes with your IDE of choice... 
+ +Would you like to: + +1 - Complete task and stage changes (Recommended) +2 - Complete task and don't stage changes + +3 - Undo changes and Retry task +4 - Leave changes and Retry task + +5 - Discard task and continue + +x - Exit + +""" + ), completer=completer, ).lower() - if result == "r": - run_task(repository, project_path, feature, ai, file_selector) - if result == "c": - complete_task(repository, project_path, feature, ai, file_selector) + print() + + if result == "1": + repository.stage_all_changes() + complete_task(feature, file_selector) + if result == "2": + complete_task(feature, file_selector) + if result == "3": + print("Rerunning generation...") + repository.undo_unstaged_changes() + generate_code_for_task(repository, project_path, feature, ai, file_selector) + if result == "4": + print("Rerunning generation...") + repository.undo_unstaged_changes() + generate_code_for_task(repository, project_path, feature, ai, file_selector) + if result == "5": + feature.clear_task() + if result == "x": print("exiting...") return diff --git a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py index 9c92f00530..247d5be7ba 100644 --- a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/chat_agent.py @@ -1,13 +1,13 @@ from gpt_engineer.core.ai import AI, HumanMessage, SystemMessage -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.files import Files +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( update_user_file_selection, ) -from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( +from gpt_engineer.applications.interactive_cli.generation_tools import ( build_files_context_string, ) diff --git a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py index 36871d1b23..b7797d8468 100644 --- a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/interactive_cli/agents/feature_agent.py @@ -1,22 +1,25 @@ -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.domain import Settings -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( initialize_new_feature, update_user_file_selection, - 
check_for_unstaged_changes, - run_task_loop, - run_adjust_loop, + print_feature_state, + update_feature, initiate_new_task, + generate_code_for_task, + review_changes, + check_existing_task, + check_for_unstaged_changes, ) -from prompt_toolkit import prompt as cli_input +# Bottom comment for testing! from gpt_engineer.core.ai import AI -from gpt_engineer.core.base_agent import BaseAgent +from yaspin import yaspin -class FeatureAgent(BaseAgent): + +class FeatureAgent: """ A cli agent which implements a feature as a set of incremental tasks """ @@ -35,49 +38,68 @@ def __init__( self.repository = repository self.file_selector = file_selector - def init(self, settings: Settings): - - initialize_new_feature( - self.ai, self.feature, self.repository, settings.no_branch - ) + def initialize_feature(self): + initialize_new_feature(self.ai, self.feature, self.repository) update_user_file_selection(self.file_selector) - initiate_new_task(self.ai, self.feature, None, self.file_selector) + print("\nFeature Initialized. Run gptf task to begin working on it.") - self.resume(settings) + def update_feature(self): - def resume(self, settings: Settings): - if self.feature.has_task(): - if cli_input( - "Complete current task and initiate new task? y/n: " - ).lower() in [ - "n", - "no", - ]: - check_for_unstaged_changes(self.repository) - - run_task_loop( - self.project_path, - self.feature, - self.repository, - self.ai, - self.file_selector, - ) + print_feature_state(self.feature, self.file_selector) + + if not self.feature.has_description(): + self.initialize_feature() + else: + update_feature(self.feature, self.repository) - initiate_new_task(self.ai, self.feature, None, self.file_selector) + def run_task(self): + print_feature_state(self.feature, self.file_selector) - run_adjust_loop(self.feature, self.file_selector) + if not self.feature.has_description(): + print( + """Run gptf to initialize new feature. 
- check_for_unstaged_changes(self.repository) +or - run_task_loop( - self.project_path, - self.feature, - self.repository, - self.ai, - self.file_selector, - ) +Run gptf --no-feature to implement task without a feature""" + ) + return + + if self.feature.has_task(): + cont = check_existing_task(self.feature, self.file_selector) + + if not cont: + return + + while True: + with yaspin(text="Gathering git context...") as spinner: + git_context = self.repository.get_git_context() + spinner.ok("✔") + + if not self.feature.has_task(): + initiate_new_task( + self.ai, self.feature, git_context, self.file_selector + ) - def improve(self): - self.resume() + cont = check_for_unstaged_changes(self.repository) + + if not cont: + return + + generate_code_for_task( + self.project_path, + self.feature, + git_context, + self.ai, + self.file_selector, + ) + + review_changes( + self.project_path, + self.feature, + self.repository, + self.ai, + self.file_selector, + ) diff --git a/gpt_engineer/applications/interactive_cli/agents/task_agent.py b/gpt_engineer/applications/interactive_cli/agents/task_agent.py deleted file mode 100644 index 33fa6a7ce5..0000000000 --- a/gpt_engineer/applications/interactive_cli/agents/task_agent.py +++ /dev/null @@ -1,94 +0,0 @@ -from gpt_engineer.applications.interactive_cli_loop.task import Task -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( - adjust_prompt_files, - check_for_unstaged_changes, - update_user_file_selection, -) - -from gpt_engineer.core.ai import AI -from gpt_engineer.core.prompt import Prompt -from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode -from gpt_engineer.core.default.disk_memory import DiskMemory -from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path -from gpt_engineer.core.preprompts_holder import PrepromptsHolder - -from prompt_toolkit import prompt as cli_input - - -class TaskAgent: - """ - A cli agent which implements a one off task - """ - - def __init__( - self, - ai: AI, - project_path: str, - task: Task, - repository: Repository, - file_selector: FileSelector, - ): - self.ai = ai - self.project_path = project_path - self.task = task - self.repository = repository - self.file_selector = file_selector - - def _confirm__task_with_user(self): - file_selector = self.file_selector - file_selector.update_yaml_from_tracked_files() - file_string = file_selector.get_pretty_selected_from_yaml() - - task = self.task.get_task() - - print(f"Files: \n\nrepo\n{file_string}\n\n") - print(f"Task: {task}\n\n") - - # do you want to attempt this task? - if cli_input("Do you want to implement this task? 
y/n: ").lower() in [ - "y", - "yes", - ]: - return True - - return False - - def _run_improve_mode(self): - memory = DiskMemory(memory_path(self.project_path)) - preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) - - prompt = Prompt(self.task.get_task()) - - selected_files = self.file_selector.get_from_yaml().included_files - - files = Files(self.project_path, selected_files) - - improve_lambda = lambda: improve_fn( - self.ai, prompt, files, memory, preprompts_holder - ) - - print("\n---- begining code generation ----\n") - updated_files_dictionary = handle_improve_mode(improve_lambda, memory) - print("\n---- ending code generation ----\n") - - files.write_to_disk(updated_files_dictionary) - - def run(self): - - self.task.open_task_in_editor() - input("Please edit the task file and then press Enter to continue...") - - update_user_file_selection(self.file_selector) - - implement = self._confirm__task_with_user() - - while not implement: - adjust_prompt_files() - implement = self._confirm__task_with_user() - - check_for_unstaged_changes(self.repository) - - self._run_improve_mode() diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/interactive_cli/feature.py index 741e5d2214..52b11917c5 100644 --- a/gpt_engineer/applications/interactive_cli/feature.py +++ b/gpt_engineer/applications/interactive_cli/feature.py @@ -42,9 +42,12 @@ def __init__(self, project_path: Union[str, Path], repository: Repository): def clear_feature(self) -> None: self.set_description(self._feature_placeholder) - self.set_task(self._task_placeholder) + self.clear_task() super().__setitem__(self._progress_filename, json.dumps({"done": []})) + def clear_task(self) -> None: + self.set_task(self._task_placeholder) + def get_description(self) -> str: """ Retrieve the content of the feature file in the database. @@ -72,9 +75,9 @@ def has_description(self) -> bool: Does the feature have a description? 
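A short sketch of how the Feature store changed above is read by the agent steps. Method names and the constructor come from this diff; running it reads and writes the feature files under the project's memory directory:

from gpt_engineer.applications.interactive_cli.feature import Feature
from gpt_engineer.applications.interactive_cli.repository import Repository

# Paths match the interactive_cli package as of this commit in the series.
repository = Repository(".")
feature = Feature(".", repository)

print("feature active:", feature.has_description())
print("task pending:", feature.has_task())
print("tasks completed so far:", feature.get_progress()["done"])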
""" - task = self.get_task() + description = self.get_description() - if task and not task == self._task_placeholder: + if description and not description == self._feature_placeholder: return True return False @@ -106,8 +109,6 @@ def update_progress(self, task: str): """ progress = self.get_progress() - print(progress["done"]) - progress["done"].append(task) json_string = json.dumps(progress, indent=4) diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/interactive_cli/generation_tools.py index af6c959bef..4d7582fdce 100644 --- a/gpt_engineer/applications/interactive_cli/generation_tools.py +++ b/gpt_engineer/applications/interactive_cli/generation_tools.py @@ -109,7 +109,7 @@ def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str: - + @@ -137,9 +137,18 @@ def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str: ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) - xml = messages[-1].content.strip() + raw_response = messages[-1].content.strip() - return parse_task_xml_to_class(xml) + xml_start = raw_response.find("<") + xml_end = raw_response.rfind(">") + 1 + xml = raw_response[xml_start:xml_end] + + try: + resp = parse_task_xml_to_class(xml) + except: + print(raw_response) + + return resp def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/interactive_cli/main.py index 0a92d843a2..3ef8e4d50a 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/interactive_cli/main.py @@ -1,18 +1,16 @@ import typer from dotenv import load_dotenv -from pathlib import Path -from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.applications.interactive_cli_loop.agents.task_agent import TaskAgent -from gpt_engineer.applications.interactive_cli_loop.agents.feature_agent import ( + +from gpt_engineer.applications.interactive_cli.agents.feature_agent import ( FeatureAgent, ) -from gpt_engineer.applications.interactive_cli_loop.agents.chat_agent import ChatAgent -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.task import Task -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.domain import Settings -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector +from gpt_engineer.applications.interactive_cli.agents.chat_agent import ChatAgent +from gpt_engineer.applications.interactive_cli.feature import Feature +from gpt_engineer.applications.interactive_cli.task import Task +from gpt_engineer.applications.interactive_cli.repository import Repository +from gpt_engineer.applications.interactive_cli.domain import Settings +from gpt_engineer.applications.interactive_cli.file_selection import FileSelector from gpt_engineer.core.ai import AI @@ -31,25 +29,12 @@ def feature( "-t", help="Controls randomness: lower values for more focused, deterministic outputs.", ), - azure_endpoint: str = typer.Option( - "", - "--azure", - "-a", - help="""Endpoint for your Azure OpenAI Service (https://xx.openai.azure.com). - In that case, the given model is the deployment name chosen in the Azure AI Studio.""", - ), verbose: bool = typer.Option( False, "--verbose", "-v", help="Enable verbose logging for debugging." 
), debug: bool = typer.Option( False, "--debug", "-d", help="Enable debug mode for debugging." ), - no_branch: bool = typer.Option( - False, - "--no-branch", - "-nb", - help="Do not create a new feature branch for this work.", - ), ): """ Handle features in the project. @@ -69,12 +54,10 @@ def feature( agent = FeatureAgent(ai, project_path, feature, repository, file_selector) - settings = Settings(no_branch) - if new: - agent.init(settings) + agent.initialize_feature() else: - agent.resume(settings) + agent.update_feature() @app.command() @@ -134,14 +117,10 @@ def task( repository = Repository(project_path) - task = Task(project_path) + feature = Feature(project_path, repository) file_selector = FileSelector(project_path, repository) - task_agent = TaskAgent(ai, project_path, task, repository, file_selector) - - task_agent.run() - - # review + agent = FeatureAgent(ai, project_path, feature, repository, file_selector) - # task.delete() + agent.run_task() From 320588c4619c1dfe7c0947e1052b0963b5f3c810 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Mon, 10 Jun 2024 22:42:36 +0100 Subject: [PATCH 33/36] delete old one --- .../interactive_cli_loop/__init__.py | 0 .../interactive_cli_loop/agents/__init__.py | 0 .../agents/agent_steps.py | 256 ---------- .../interactive_cli_loop/agents/chat_agent.py | 68 --- .../agents/feature_agent.py | 83 --- .../interactive_cli_loop/agents/task_agent.py | 94 ---- .../interactive_cli_loop/domain.py | 13 - .../interactive_cli_loop/example_project | 1 - .../interactive_cli_loop/feature.py | 181 ------- .../interactive_cli_loop/file_selection.py | 320 ------------ .../interactive_cli_loop/files.py | 40 -- .../interactive_cli_loop/generation_tools.py | 483 ------------------ .../applications/interactive_cli_loop/main.py | 147 ------ .../interactive_cli_loop/prompts/__init__.py | 0 .../prompts/fuzzy_file_parser | 320 ------------ .../interactive_cli_loop/repository.py | 151 ------ .../applications/interactive_cli_loop/task.py | 80 --- 17 files changed, 2237 deletions(-) delete mode 100644 gpt_engineer/applications/interactive_cli_loop/__init__.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/__init__.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/domain.py delete mode 160000 gpt_engineer/applications/interactive_cli_loop/example_project delete mode 100644 gpt_engineer/applications/interactive_cli_loop/feature.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/file_selection.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/files.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/generation_tools.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/main.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/prompts/__init__.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser delete mode 100644 gpt_engineer/applications/interactive_cli_loop/repository.py delete mode 100644 gpt_engineer/applications/interactive_cli_loop/task.py diff --git a/gpt_engineer/applications/interactive_cli_loop/__init__.py 
b/gpt_engineer/applications/interactive_cli_loop/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/__init__.py b/gpt_engineer/applications/interactive_cli_loop/agents/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py b/gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py deleted file mode 100644 index 3d9d2d25b7..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/agents/agent_steps.py +++ /dev/null @@ -1,256 +0,0 @@ -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( - generate_branch_name, - build_feature_context_string, - generate_suggested_tasks, -) - -from gpt_engineer.core.ai import AI -from gpt_engineer.core.prompt import Prompt -from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode -from gpt_engineer.core.default.disk_memory import DiskMemory -from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path -from gpt_engineer.core.preprompts_holder import PrepromptsHolder - -from prompt_toolkit import prompt as cli_input -from prompt_toolkit.validation import ValidationError, Validator -from prompt_toolkit import PromptSession as InputSession -from prompt_toolkit.formatted_text import FormattedText -from prompt_toolkit.completion import WordCompleter - -from yaspin import yaspin - - -# This is a random comment to prove the assistant works -class FeatureValidator(Validator): - def validate(self, document): - text = document.text - if not text: - raise ValidationError( - message="Feature description cannot be empty", cursor_position=len(text) - ) - - -def initialize_new_feature( - ai: AI, feature: Feature, repository: Repository, no_branch: bool -): - feature.clear_feature() - - update_feature_description(feature) - - if not no_branch: - print("Creating feature branch... (this can be disabled with -nb setting)") - - branch_name = generate_branch_name(ai, feature.get_description()) - - branch_name = cli_input("\nConfirm branch name: ", default=branch_name) - - repository.create_branch(branch_name) - print("\nFeature branch created.\n") - - -def update_user_file_selection(file_selector: FileSelector): - file_selector.update_yaml_from_tracked_files() - file_selector.open_yaml_in_editor() - input( - "Please edit the file selection for this feature and then press Enter to continue..." 
- ) - - -def update_feature_description(feature: Feature): - feature.open_feature_in_editor() - input("Please edit the feature file and then press Enter to continue...") - - -def update_task_description(feature: Feature): - feature.open_task_in_editor() - input("Please edit the task file and then press Enter to continue...") - - -def initiate_new_task(ai, feature, git_context, file_selector): - - files = file_selector.get_included_as_file_repository() - - try: - with yaspin(text="Generating suggested tasks...") as spinner: - response = generate_suggested_tasks(ai, feature, git_context, files) - spinner.ok("✔") # Success message - except Exception as e: - raise RuntimeError("Error generating task suggestions.") from e - - tasks = response.tasks - - max_tasks = min(len(tasks), 3) - options = [str(i + 1) for i in range(max_tasks)] + ["c"] - completer = WordCompleter(options, ignore_case=True) - - task_list_message = "\n".join([f"{i + 1}: {tasks[i]}" for i in range(max_tasks)]) - - def get_prompt(): - return [ - ("class:text", response.planning_thoughts), - ( - "class:text", - "\n\nWould you like to work on one of these suggested tasks or choose your own?\n", - ), - ("class:text", task_list_message), - ("class:text", "\nc: Custom task\n"), - ] - - session = InputSession() - result = session.prompt(FormattedText(get_prompt()), completer=completer).lower() - - if result in options[:-1]: - selected_task = tasks[int(result) - 1] - print(f"Selected task: {selected_task}") - feature.set_task(selected_task) - - if result == "c": - update_task_description(feature) - - -def check_for_unstaged_changes( - repository: Repository, -): - unstaged_changes = repository.get_unstaged_changes() - - if unstaged_changes: - if input( - "Unstaged changes present are you sure you want to proceed? y/n: " - ).lower() not in ["", "y", "yes"]: - print("Ok, not proceeding.") - return - - -def confirm_feature_context_and_task_with_user( - feature: Feature, file_selector: FileSelector -): - file_selector.update_yaml_from_tracked_files() - file_string = file_selector.get_pretty_selected_from_yaml() - - feature_description = feature.get_description() - task = feature.get_task() - - # list feature, files and task - print(f"Feature: {feature_description}\n\n") - print(f"Files: \n\nrepo\n{file_string}\n\n") - print(f"Task: {task}\n\n") - - # do you want to attempt this task? - if cli_input("Do you want to implement this task? y/n: ").lower() in [ - "y", - "yes", - ]: - return True - - return False - - -# todo : create a function which uses the test4.py example code approach to offer a selection of options to the user -# f - "edit feature" using update_feature_description step -# s - "edit file selection" using update_user_file_selection step -# t - "edit task" using update_task_description step -# c - complete the task and start a new one -# x - exit -def adjust_prompt_files(): - input("Please edit the prompt files and then press Enter to continue...") - - -def run_task_loop( - project_path, - feature: Feature, - repository: Repository, - ai: AI, - file_selector: FileSelector, -): - - memory = DiskMemory(memory_path(project_path)) - preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) - - context_string = build_feature_context_string(feature, repository.get_git_context()) - - feature_agent_context = f"""I am working on a feature but breaking it up into small incremental tasks. Your job is to complete the incremental task provided to you - only that task and nothing more. 
- -The purpose of this message is to give you wider context around the feature you are working on and what incremental tasks have already been completed so far. - -{context_string}""" - - prompt = Prompt(feature.get_task(), prefix="Task: ") - - files = file_selector.get_included_as_file_repository() - - improve_lambda = lambda: improve_fn( - ai, prompt, files, memory, preprompts_holder, feature_agent_context - ) - - print("\n---- begining code generation ----\n") - # Creates loop - updated_files_dictionary = handle_improve_mode(improve_lambda, memory) - print("\n---- ending code generation ----\n") - - files.write_to_disk(updated_files_dictionary) - - review_changes(project_path, feature, repository, ai, file_selector) - - -def run_adjust_loop(feature, file_selector): - implement = confirm_feature_context_and_task_with_user(feature, file_selector) - - while not implement: - adjust_prompt_files() - implement = confirm_feature_context_and_task_with_user(feature, file_selector) - - -def run_task(repository, project_path, feature, ai, file_selector): - print("Rerunning generation...") - check_for_unstaged_changes(repository) - run_task_loop(project_path, feature, repository, ai, file_selector) - - -def complete_task(repository, project_path, feature, ai, file_selector): - print("Completing task... ") - repository.stage_all_changes() - feature.complete_task() - file_selector.update_yaml_from_tracked_files() - git_context = repository.get_git_context() - - print("Continuing with next task...") - initiate_new_task(ai, feature, git_context, file_selector) - - check_for_unstaged_changes(repository) - - run_task_loop(project_path, feature, repository, ai, file_selector) - - -def review_changes( - project_path, - feature: Feature, - repository: Repository, - ai: AI, - file_selector: FileSelector, -): - - completer = WordCompleter(["r", "c", "u"], ignore_case=True) - session = InputSession() - - # Using prompt to get user input - result = session.prompt( - """Please review the unstaged changes generated by GPT Engineer.. 
- -r: Retry the task (incorporating changes to prompt files) -c: Complete task and stage changes -x: Exit -""", - completer=completer, - ).lower() - - if result == "r": - run_task(repository, project_path, feature, ai, file_selector) - if result == "c": - complete_task(repository, project_path, feature, ai, file_selector) - if result == "x": - print("exiting...") - return diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py b/gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py deleted file mode 100644 index 9c92f00530..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/agents/chat_agent.py +++ /dev/null @@ -1,68 +0,0 @@ -from gpt_engineer.core.ai import AI, HumanMessage, SystemMessage - -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( - update_user_file_selection, -) -from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( - build_files_context_string, -) - - -class ChatAgent: - - def __init__( - self, - ai: AI, - project_path: str, - feature: Feature, - repository: Repository, - file_selector: FileSelector, - ): - self.ai = ai - self.project_path = project_path - self.feature = feature - self.repository = repository - self.file_selector = file_selector - - def start(self): - - update_user_file_selection(self.file_selector) - - selected_files = self.file_selector.get_from_yaml().included_files - - files = Files(self.project_path, selected_files) - - context_string = build_files_context_string( - self.feature, self.repository.get_git_context(), files - ) - - system = f"""You are the chat function of an AI software engineering tool called gpt engineer. - -The tool takes a feature descriptioin, progress on the feature, git context, and repository files relevent to the feature -and based on that it suggests new tasks to complete in order to progress the feature, and it implements those tasks for the user. - -You are not that tool, you are the chat function of that tool. You are here to help the user discuss their code and their feature and understand discuss any part of it with you - a software engineering expert. - -Always provide advice as to best software engineering practices. 
- -Here is the context for your conversation: - -{context_string}""" - - messages = [ - SystemMessage(content=system), - HumanMessage(content="Hi"), - ] - - while True: - print("\nAI:") - response = self.ai.backoff_inference(messages) - messages.append(response) - - print("\n\nYou:") - user_message = input() - messages.append(HumanMessage(content=user_message)) diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py b/gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py deleted file mode 100644 index 36871d1b23..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/agents/feature_agent.py +++ /dev/null @@ -1,83 +0,0 @@ -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.domain import Settings -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( - initialize_new_feature, - update_user_file_selection, - check_for_unstaged_changes, - run_task_loop, - run_adjust_loop, - initiate_new_task, -) -from prompt_toolkit import prompt as cli_input - -from gpt_engineer.core.ai import AI -from gpt_engineer.core.base_agent import BaseAgent - - -class FeatureAgent(BaseAgent): - """ - A cli agent which implements a feature as a set of incremental tasks - """ - - def __init__( - self, - ai: AI, - project_path: str, - feature: Feature, - repository: Repository, - file_selector: FileSelector, - ): - self.ai = ai - self.project_path = project_path - self.feature = feature - self.repository = repository - self.file_selector = file_selector - - def init(self, settings: Settings): - - initialize_new_feature( - self.ai, self.feature, self.repository, settings.no_branch - ) - - update_user_file_selection(self.file_selector) - - initiate_new_task(self.ai, self.feature, None, self.file_selector) - - self.resume(settings) - - def resume(self, settings: Settings): - if self.feature.has_task(): - if cli_input( - "Complete current task and initiate new task? 
y/n: " - ).lower() in [ - "n", - "no", - ]: - check_for_unstaged_changes(self.repository) - - run_task_loop( - self.project_path, - self.feature, - self.repository, - self.ai, - self.file_selector, - ) - - initiate_new_task(self.ai, self.feature, None, self.file_selector) - - run_adjust_loop(self.feature, self.file_selector) - - check_for_unstaged_changes(self.repository) - - run_task_loop( - self.project_path, - self.feature, - self.repository, - self.ai, - self.file_selector, - ) - - def improve(self): - self.resume() diff --git a/gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py b/gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py deleted file mode 100644 index 33fa6a7ce5..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/agents/task_agent.py +++ /dev/null @@ -1,94 +0,0 @@ -from gpt_engineer.applications.interactive_cli_loop.task import Task -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.agents.agent_steps import ( - adjust_prompt_files, - check_for_unstaged_changes, - update_user_file_selection, -) - -from gpt_engineer.core.ai import AI -from gpt_engineer.core.prompt import Prompt -from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode -from gpt_engineer.core.default.disk_memory import DiskMemory -from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path -from gpt_engineer.core.preprompts_holder import PrepromptsHolder - -from prompt_toolkit import prompt as cli_input - - -class TaskAgent: - """ - A cli agent which implements a one off task - """ - - def __init__( - self, - ai: AI, - project_path: str, - task: Task, - repository: Repository, - file_selector: FileSelector, - ): - self.ai = ai - self.project_path = project_path - self.task = task - self.repository = repository - self.file_selector = file_selector - - def _confirm__task_with_user(self): - file_selector = self.file_selector - file_selector.update_yaml_from_tracked_files() - file_string = file_selector.get_pretty_selected_from_yaml() - - task = self.task.get_task() - - print(f"Files: \n\nrepo\n{file_string}\n\n") - print(f"Task: {task}\n\n") - - # do you want to attempt this task? - if cli_input("Do you want to implement this task? 
y/n: ").lower() in [ - "y", - "yes", - ]: - return True - - return False - - def _run_improve_mode(self): - memory = DiskMemory(memory_path(self.project_path)) - preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) - - prompt = Prompt(self.task.get_task()) - - selected_files = self.file_selector.get_from_yaml().included_files - - files = Files(self.project_path, selected_files) - - improve_lambda = lambda: improve_fn( - self.ai, prompt, files, memory, preprompts_holder - ) - - print("\n---- begining code generation ----\n") - updated_files_dictionary = handle_improve_mode(improve_lambda, memory) - print("\n---- ending code generation ----\n") - - files.write_to_disk(updated_files_dictionary) - - def run(self): - - self.task.open_task_in_editor() - input("Please edit the task file and then press Enter to continue...") - - update_user_file_selection(self.file_selector) - - implement = self._confirm__task_with_user() - - while not implement: - adjust_prompt_files() - implement = self._confirm__task_with_user() - - check_for_unstaged_changes(self.repository) - - self._run_improve_mode() diff --git a/gpt_engineer/applications/interactive_cli_loop/domain.py b/gpt_engineer/applications/interactive_cli_loop/domain.py deleted file mode 100644 index 2493c40772..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/domain.py +++ /dev/null @@ -1,13 +0,0 @@ -from dataclasses import dataclass -from typing import List - - -@dataclass -class FileSelection: - included_files: List[str] - excluded_files: List[str] - - -class Settings: - def __init__(self, no_branch: bool = False): - self.no_branch = no_branch diff --git a/gpt_engineer/applications/interactive_cli_loop/example_project b/gpt_engineer/applications/interactive_cli_loop/example_project deleted file mode 160000 index b22fbe6c76..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/example_project +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b22fbe6c760ac196edacdfb508ad300d033e12d9 diff --git a/gpt_engineer/applications/interactive_cli_loop/feature.py b/gpt_engineer/applications/interactive_cli_loop/feature.py deleted file mode 100644 index 909e5a36c3..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/feature.py +++ /dev/null @@ -1,181 +0,0 @@ -import json -import os -import platform -import subprocess -from pathlib import Path -from typing import Union - -from gpt_engineer.core.default.disk_memory import DiskMemory -from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.repository import Repository - - -class Feature(DiskMemory): - """ - Represents a ticket which will be developed incrementally, - - Includes with a feature (overal description of the change), - a task (current incremental work item), - and progress (history of incremental work completed) - """ - - def __init__(self, project_path: Union[str, Path], repository: Repository): - - self._feature_path = Path(memory_path(project_path)) / "feature" - self.path = self._feature_path - self._feature_filename = "feature.md" - self._progress_filename = "progress.json" - self._task_filename = "task.md" - - self._feature_placeholder = """Please replace with your own feature description. Markdown is supported. 
- -Hint: -Improve your prompts by including technical references to any APIs, libraries, components etc that the pre trained model may not know about in detail already.""" - - self._task_placeholder = "Please replace with a task description - directing the AI on the first task to implement on this feature" - - if not os.path.exists(self._feature_path): - os.makedirs(self._feature_path) - - super().__init__(self._feature_path) - - def clear_feature(self) -> None: - self.set_description(self._feature_placeholder) - self.set_task(self._task_placeholder) - super().__setitem__(self._progress_filename, json.dumps({"done": []})) - - def get_description(self) -> str: - """ - Retrieve the content of the feature file in the database. - - Returns - ------- - str - The content of the feature file. - """ - return super().__getitem__(self._feature_filename) - - def set_description(self, feature_description: str): - """ - Updates the feature file with new text. - - Parameters - ---------- - feature_description : str - The new feature_description to write to the feature file. - """ - super().__setitem__(self._feature_filename, feature_description) - - def get_progress(self) -> dict: - """ - Retrieve the progress object. - - Returns - ------- - str - The content of the feature file. - """ - - json_string = super().__getitem__(self._progress_filename) - if json_string: - return json.loads(json_string) - - return None - - def update_progress(self, task: str): - """ - Updates the progress with a new completed task. - - Parameters - ---------- - feature_description : str - The new feature_description to write to the feature file. - """ - progress = self.get_progress() - - print(progress["done"]) - - progress["done"].append(task) - - json_string = json.dumps(progress, indent=4) - - super().__setitem__(self._progress_filename, json_string) - - def set_task(self, task: str): - """ - Updates the task file with new text. - - Parameters - ---------- - task : str - The new task to write to the feature file. - """ - super().__setitem__(self._task_filename, task) - - def get_task(self) -> str: - """ - Retrieve the content of the feature file in the database. - - Returns - ------- - str - The content of the feature file. - """ - return super().__getitem__(self._task_filename) - - def has_task(self) -> bool: - """ - Retrieve the content of the feature file in the database. - - Returns - ------- - str - The content of the feature file. - """ - - task = self.get_task() - - if task and not task == self._task_placeholder: - return True - - return False - - def complete_task(self): - """ - Moves the current task to the 'done' list in the progress.json file and clears the task file. - """ - task = self.get_task() - - if task: - self.update_progress(task) - self.set_task("") - - def _file_path(self, filename): - return self._feature_path / filename - - def _open_file_in_editor(self, path): - """ - Opens the generated YAML file in the default system editor. - If the YAML file is empty or doesn't exist, generate it first. - """ - - # Platform-specific methods to open the file - if platform.system() == "Windows": - os.startfile(path) - elif platform.system() == "Darwin": - subprocess.run(["open", path]) - else: # Linux and other Unix-like systems - subprocess.run(["xdg-open", path]) - - def open_feature_in_editor(self): - """ - Opens the feature file in the default system editor. 
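For reference, the Feature store above keeps three small artifacts under the project's memory path: feature.md for the overall description, task.md for the current increment, and progress.json, whose "done" list records each finished increment. A small illustration of how complete_task grows that structure (the task texts here are hypothetical):

import json

# Hypothetical progress.json contents after two completed increments.
progress = {
    "done": [
        "Create a view file for the account page",
        "Make an API call to retrieve account information",
    ]
}

# complete_task() appends the current task to "done" and then clears task.md.
current_task = "Add a loading state to the account page"
progress["done"].append(current_task)

print(json.dumps(progress, indent=4))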
- """ - self._open_file_in_editor(self._file_path(self._feature_filename)) - - def open_task_in_editor(self): - """ - Opens the task file in the default system editor. - """ - self._open_file_in_editor(self._file_path(self._task_filename)) diff --git a/gpt_engineer/applications/interactive_cli_loop/file_selection.py b/gpt_engineer/applications/interactive_cli_loop/file_selection.py deleted file mode 100644 index 6ccfeed17c..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/file_selection.py +++ /dev/null @@ -1,320 +0,0 @@ -import os -import platform -import subprocess -import yaml -from pathlib import Path - - -from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.core.ai import AI - -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( - fuzzy_parse_file_selection, -) -from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection - - -def paths_to_tree(paths): - tree = {} - files_marker = "(./)" - - for path in paths: - parts = path.split("/") - current_level = tree - - for part in parts[:-1]: - if part not in current_level: - current_level[part] = {} - current_level = current_level[part] - - if isinstance(current_level, dict): - if files_marker not in current_level: - current_level[files_marker] = [] - current_level[files_marker].append(parts[-1]) - - # Clean and sort the tree to match the required format - def clean_tree(node): - if not isinstance(node, dict): - return node - sorted_keys = sorted(node.keys(), key=lambda x: (x == files_marker, x)) - cleaned_node = {key: clean_tree(node[key]) for key in sorted_keys} - if sorted_keys == [files_marker]: - return cleaned_node[files_marker] - return cleaned_node - - cleaned_tree = clean_tree(tree) - return cleaned_tree - - -def tree_to_paths(tree): - - files_marker = "(./)" - - def traverse_tree(tree, base_path=""): - paths = [] - if tree: - for key, value in tree.items(): - if key == files_marker: - if value: - for file in value: - paths.append(os.path.join(base_path, file)) - elif isinstance(value, list): - for file in value: - paths.append(os.path.join(base_path, key, file)) - else: - subfolder_path = os.path.join(base_path, key) - paths.extend(traverse_tree(value, subfolder_path)) - return paths - - return traverse_tree(tree) - - -def commented_yaml_to_file_selection(commented_content) -> FileSelection: - commented_content_lines = commented_content.split("\n") - uncommented_content_1 = "\n".join( - line.replace("# ", "").replace("#", "") for line in commented_content_lines - ) - uncommented_content_2 = "\n".join( - line.replace("#", "") for line in commented_content_lines - ) - - included_files = tree_to_paths(yaml.safe_load(commented_content)) - try: - all_files = tree_to_paths(yaml.safe_load(uncommented_content_1)) - except: - try: - all_files = tree_to_paths(yaml.safe_load(uncommented_content_2)) - except: - raise ValueError( - "Could not convert the commented yaml to a file selection. Please check the format." 
- ) - - included_files_not_in_all_files = set(included_files) - set(all_files) - - if len(included_files_not_in_all_files) > 0: - raise ValueError("Yaml file selection has not been read correctly.") - - excluded_files = list(set(all_files) - set(included_files)) - return FileSelection(included_files, excluded_files) - - -def file_selection_to_commented_yaml(selection: FileSelection) -> str: - # Dont worry about commenting lines if they are no excluded files - if not selection.excluded_files: - tree = paths_to_tree(selection.included_files) - - return yaml.dump(tree, sort_keys=False) - - all_files = list(selection.included_files) + list(selection.excluded_files) - - current_tree = paths_to_tree(all_files) - - # Add a # in front of files which are excluded. This is a marker for us to go back and properly comment them out - def mark_excluded_files(structure, prefix=""): - if isinstance(structure, dict): - for key, value in structure.items(): - if key == "(./)": - structure[key] = mark_excluded_files(value, prefix) - else: - new_prefix = os.path.join(prefix, key) - structure[key] = mark_excluded_files(value, new_prefix) - elif isinstance(structure, list): - for i, item in enumerate(structure): - full_path = os.path.join(prefix, item) - - if full_path in selection.excluded_files: - structure[i] = f"#{item}" - - return structure - - mark_excluded_files(current_tree) - - content = yaml.dump(current_tree, sort_keys=False) - - # Find all files marked for commenting - add comment and remove the mark. - def comment_marked_files(yaml_content): - lines = yaml_content.split("\n") - - updated_lines = [] - for line in lines: - if "#" in line: - line = line.replace("- '#", "#- ").replace("'", "") - updated_lines.append(line) - - return "\n".join(updated_lines) - - commented_yaml = comment_marked_files(content) - - return commented_yaml - - -class FileSelector: - """ - Manages the active files in a project directory and creates a YAML file listing them. 
- """ - - def __init__(self, project_path: str, repository: Repository): - self.project_path = project_path - self.ai = AI("gpt-4o", temperature=0) - self.repository = repository - self.yaml_path = Path(memory_path(project_path)) / "files.yml" - - if os.path.exists(self.yaml_path): - return - - print("YAML file is missing or empty, generating YAML...") - - file_selection = FileSelection([], self.repository.get_tracked_files()) - - self.set_to_yaml(file_selection) - - def _write_yaml_with_header(self, yaml_content): - - def add_indentation(content): - lines = content.split("\n") - new_lines = [] - last_key = None - - for line in lines: - stripped_line = line.replace("#", "").strip() - if stripped_line.endswith(":"): - last_key = stripped_line - if stripped_line.startswith("- ") and (last_key != "(./):"): - # add 2 spaces at the begining of line or after any # - - new_lines.append(" " + line) # Add extra indentation - else: - new_lines.append(line) - return "\n".join(new_lines) - - indented_content = add_indentation(yaml_content) - with open(self.yaml_path, "w") as file: - file.write( - f"""# Uncomment any files you would like to use for this feature -# Note that (./) is a special key which represents files at the root of the parent directory - -{indented_content}""" - ) - - def _read_yaml_with_headers(self): - with open(self.yaml_path, "r") as file: - original_content_lines = file.readlines()[3:] - - return "".join(original_content_lines) - - def set_to_yaml(self, file_selection): - - commented_yaml = file_selection_to_commented_yaml(file_selection) - - self._write_yaml_with_header(commented_yaml) - - return - - def update_yaml_from_tracked_files(self): - """ - Updates the YAML file with the current list of tracked files. - """ - - tracked_files = self.repository.get_tracked_files() - - file_selection = self.get_from_yaml() - - # If there are no changes, do nothing - if set(tracked_files) == set( - file_selection.included_files + file_selection.excluded_files - ): - return - - new_included_files = list( - set(tracked_files) - set(file_selection.excluded_files) - ) - - self.set_to_yaml( - FileSelection(new_included_files, file_selection.excluded_files) - ) - - def get_from_yaml(self) -> FileSelection: - """ - Get selected file paths and excluded file paths from yaml - """ - - yaml_content = self._read_yaml_with_headers() - - try: - file_selection = commented_yaml_to_file_selection(yaml_content) - except: - print( - "Could not read the file selection from the YAML file. Attempting to fix with AI" - ) - print(yaml_content) - file_selection = fuzzy_parse_file_selection(self.ai, yaml_content) - self.set_to_yaml(file_selection) - - return file_selection - - def get_pretty_selected_from_yaml(self) -> str: - """ - Retrieves selected file paths from the YAML file and prints them in an ASCII-style tree structure. 
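The YAML convention this selector relies on can be summarised independently of the class: folders become mapping keys, files become list items, files sitting directly in a folder go under the special "(./)" key, and excluding a file means commenting its line out. A simplified sketch of the path-to-tree direction follows; the example paths are hypothetical, and the real module additionally collapses folders that contain only files into plain lists.

import yaml

# Hypothetical selection; "(./)" holds files that live directly in the parent folder.
included = ["src/models/user.py", "src/models/post.py", "README.md"]
excluded = ["src/controllers/user_controller.py"]


def paths_to_tree(paths):
    tree = {}
    for path in paths:
        *folders, filename = path.split("/")
        node = tree
        for folder in folders:
            node = node.setdefault(folder, {})
        node.setdefault("(./)", []).append(filename)
    return tree


print(yaml.dump(paths_to_tree(included + excluded), sort_keys=False))
# A FileSelector-style writer would then prefix the excluded entries with "#",
# and the reader would treat any commented entry as excluded.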
- """ - # Get selected files from YAML - file_selection = self.get_from_yaml() - - # Helper function to insert a path into the tree dictionary - def insert_path(tree, path_parts): - # Recursively build nested dictionary from path parts - if not path_parts: - return - if path_parts[0] not in tree: - tree[path_parts[0]] = {} - insert_path(tree[path_parts[0]], path_parts[1:]) - - file_tree = {} - for filepath in file_selection.included_files: - parts = filepath.split("/") - insert_path(file_tree, parts) - - # Helper function to format the tree into a string with ASCII graphics - def format_tree(tree, prefix=""): - lines = [] - # Separate directories and files - directories = {k: v for k, v in tree.items() if v} - files = {k: v for k, v in tree.items() if not v} - # Sort items to keep alphabetical order, directories first - items = sorted(directories.items()) + sorted(files.items()) - for i, (key, sub_tree) in enumerate(items): - if i == len(items) - 1: # Last item uses └── - lines.append(prefix + "└── " + key) - extension = " " - else: - lines.append(prefix + "├── " + key) - extension = "│ " - if sub_tree: - lines.extend(format_tree(sub_tree, prefix=prefix + extension)) - return lines - - # Generate formatted tree lines - tree_lines = format_tree(file_tree) - - # Join lines and return as a string - return "\n".join(tree_lines) - - def open_yaml_in_editor(self): - """ - Opens the generated YAML file in the default system editor. - If the YAML file is empty or doesn't exist, generate it first. - """ - - # Platform-specific methods to open the file - if platform.system() == "Windows": - os.startfile(self.yaml_path) - elif platform.system() == "Darwin": - subprocess.run(["open", self.yaml_path]) - else: # Linux and other Unix-like systems - subprocess.run(["xdg-open", self.yaml_path]) - - def get_included_as_file_repository(self): - file_selection = self.get_from_yaml() - - return Files(self.project_path, file_selection.included_files) diff --git a/gpt_engineer/applications/interactive_cli_loop/files.py b/gpt_engineer/applications/interactive_cli_loop/files.py deleted file mode 100644 index b1e7347129..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/files.py +++ /dev/null @@ -1,40 +0,0 @@ -from pathlib import Path - -from gpt_engineer.core.files_dict import FilesDict - - -class Files(FilesDict): - def __init__(self, project_path: str, selected_files: list): - """ - Initialize the Files object by reading the content of the provided file paths. - - Parameters - ---------- - project_path : str - The base path of the project. - selected_files : list - List of file paths relative to the project path. 
- """ - - self.project_path = project_path - # Convert the list of selected files and their relative directory into a dictionary of relative file paths - content_dict = {} - for file_path in selected_files: - try: - with open( - Path(project_path) / file_path, "r", encoding="utf-8" - ) as content: - content_dict[str(file_path)] = content.read() - except FileNotFoundError: - print(f"Warning: File not found {file_path}") - except UnicodeDecodeError: - print(f"Warning: File not UTF-8 encoded {file_path}, skipping") - super().__init__(content_dict) - - def write_to_disk(self, files: FilesDict): - for name, content in files.items(): - path = Path(self.project_path) / name - path.parent.mkdir(parents=True, exist_ok=True) - with open(path, "w") as f: - f.write(content) - return self diff --git a/gpt_engineer/applications/interactive_cli_loop/generation_tools.py b/gpt_engineer/applications/interactive_cli_loop/generation_tools.py deleted file mode 100644 index af6c959bef..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/generation_tools.py +++ /dev/null @@ -1,483 +0,0 @@ -import xml.etree.ElementTree as ET -import json - -from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection -from gpt_engineer.core.ai import AI - -from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler - - -def generate_branch_name(ai: AI, feature_description: str) -> str: - system_prompt = """ - You are a branch name autocomplete / suggestion tool. Based on the users input, please respond with a single suggestion of a branch name and notthing else. - - Example: - - Input: I want to add a login button - Output: feature/login-button - """ - - ai.llm.callbacks.clear() # silent - - messages = ai.start(system_prompt, feature_description, step_name="name-branch") - - ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) - - return messages[-1].content.strip() - - -class TaskResponse: - def __init__(self, planning_thoughts, tasks, closing_remarks): - self.planning_thoughts = planning_thoughts - self.tasks = tasks - self.closing_remarks = closing_remarks - - def __str__(self): - return f"Planning Thoughts: {self.planning_thoughts}\nTasks: {'; '.join(self.tasks)}\nClosing Remarks: {self.closing_remarks}" - - -def parse_task_xml_to_class(xml_data): - # Parse the XML data - root = ET.fromstring(xml_data) - - # Extract the planning thoughts - planning_thoughts = root.find("PlanningThoughts").text.strip() - - # Extract tasks - tasks = [task.text.strip() for task in root.findall(".//Task")] - - # Extract closing remarks - closing_remarks = root.find("ClosingRemarks").text.strip() - - # Create an instance of the response class - response = TaskResponse(planning_thoughts, tasks, closing_remarks) - - return response - - -def build_git_context_string(git_context): - return f"""## Git Context - these are the code changes made so far while implementing this feature. This may include work completed by you on previous tasks as well as changes made independently by me. -### Branch Changes - this is the cumulative diff of all the commits so far on the feature branch. -{git_context.branch_changes} - -### Staged Changes - this is the diff of the current staged changes. -{git_context.staged_changes}""" - - -def build_feature_context_string(feature, git_context): - feature_string = f"""## Feature - this is the description fo the current feature we are working on. 
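To make the expected reply format concrete: parse_task_xml_to_class above reads a PlanningThoughts element, any number of Task elements, and a ClosingRemarks element from the model's XML reply. The snippet below is only an illustration of a reply that parser would accept; the root and grouping tag names are assumptions, and the task texts are invented.

import xml.etree.ElementTree as ET

# Hypothetical model reply; only the element names read by parse_task_xml_to_class matter.
xml_data = """
<Response>
  <PlanningThoughts>Persistence for account settings is still missing.</PlanningThoughts>
  <Tasks>
    <Task>Add an AccountSettings model and migration.</Task>
    <Task>Expose a read-only /settings endpoint.</Task>
  </Tasks>
  <ClosingRemarks>Both tasks are small and independently committable.</ClosingRemarks>
</Response>
"""

root = ET.fromstring(xml_data)
planning_thoughts = root.find("PlanningThoughts").text.strip()
tasks = [task.text.strip() for task in root.findall(".//Task")]
closing_remarks = root.find("ClosingRemarks").text.strip()

print(planning_thoughts)
print(tasks)
print(closing_remarks)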
-{feature.get_description()} - -## Completed Tasks - these are the lists of tasks you have completed so far on the feature branch. -{feature.get_progress()["done"]} -""" - - if git_context: - return f"""{feature_string} - -{build_git_context_string(git_context)} -""" - - return feature_string - - -def build_files_context_string(feature, git_context, files): - return f"""{build_feature_context_string(feature, git_context)} - -## Current Codebase - this is the as is view of the current code base including any unstaged changes. -{files.to_chat()} -""" - - -def generate_suggested_tasks(ai: AI, feature, git_context, files) -> str: - system_prompt = """ -You are a software engineer work planning tool. Given a feature description, a list of tasks already completed, and sections of the code -repository we are working on, suggest a list of implementation tasks to be done in order to move towards the end goal of completing the feature. - -An implementation task consists of actually writing some code - and doesnt include review or research tasks, or any other activity other tha writing code. - -First start by outputting your planning thoughts: an overview of what we are trying to achieve, what we have achieved so far, and what implementation tasks are left to be done. - -Then output the list of between 0 and 3 implementation tasks to be done which get us closer to our goal. Please try to keep the tasks small, actionable and independantly commitable. - -We only need to move towards our goal with these tasks, we dont have to complete the feature in these 3 steps. - -The output format will be XML as follows: - - - - - - - - - - - - - - - - - - - - - -Respond in XML and nothing else. - -You may send as as little as 0 tasks and as many as 3. If you believe the feature is complete, send 0 tasks. -""" - - input = build_files_context_string(feature, git_context, files) - - ai.llm.callbacks.clear() # silent - - messages = ai.start(system_prompt, input, step_name="suggest-tasks") - - ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) - - xml = messages[-1].content.strip() - - return parse_task_xml_to_class(xml) - - -def fuzzy_parse_file_selection(ai: AI, yaml_string: str) -> FileSelection: - # todo: load prompt from ptompts/fuzzy_file_parser - - system_prompt = """## Explanation -You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. - -A user has been given a yaml representation of a file structure, represented like so: - -.github: - ISSUE_TEMPLATE: - - bug-report.md - - documentation-clarification.md - - feature-request.md - PULL_REQUEST_TEMPLATE: - - PULL_REQUEST_TEMPLATE.md - workflows: - - automation.yml - - ci.yaml - - pre-commit.yaml - - release.yaml - (./): - - CODEOWNERS - - CODE_OF_CONDUCT.md - - CONTRIBUTING.md - - FUNDING.yml - -Folders are represented as keys in a dictionary, files are items in a list. Any files listed under the (./) key can be assumed to be files of the folder above that. 
- -The given example maps to these file paths: - -".github/ISSUE_TEMPLATE/bug-report.md", -".github/ISSUE_TEMPLATE/documentation-clarification.md", -".github/ISSUE_TEMPLATE/feature-request.md", -".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", -".github/workflows/automation.yml", -".github/workflows/ci.yaml", -".github/workflows/pre-commit.yaml", -".github/workflows/release.yaml", -".github/CODEOWNERS", -".github/CODE_OF_CONDUCT.md", -".github/CONTRIBUTING.md", -".github/FUNDING.yml", - -An example of the yaml file after commenting might be something like this: - - -.github: - # ISSUE_TEMPLATE: - # - bug-report.md - # - documentation-clarification.md - # - feature-request.md - # PULL_REQUEST_TEMPLATE: - # - PULL_REQUEST_TEMPLATE.md - workflows: - - automation.yml - - ci.yaml - - pre-commit.yaml - - release.yaml - # (./): - # - CODEOWNERS - - CODE_OF_CONDUCT.md - - CONTRIBUTING.md - # - FUNDING.yml - - -This would convert into: - -{ - "included_files": [ - ".github/workflows/automation.yml", - ".github/workflows/ci.yaml", - ".github/workflows/pre-commit.yaml", - ".github/workflows/release.yaml", - ".github/CODE_OF_CONDUCT.md", - ".github/CONTRIBUTING.md" - ], - "excluded_files": [ - ".github/ISSUE_TEMPLATE/bug-report.md", - ".github/ISSUE_TEMPLATE/documentation-clarification.md", - ".github/ISSUE_TEMPLATE/feature-request.md", - ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", - ".github/CODEOWNERS", - ".github/FUNDING.yml" - ] -} - - -Although the commmented content wasnt strictly correct yaml, their intentions were clear. They wanted to retain the files in the workflow folder aswell as the code of conduct and contributing guides - -Based on commented yaml inputs such as this, your job is to output JSON, indicating which files have been included and which have been excluded. - -Excluded files are always commented out with a # like in the above example. - -The json you should return will be like this: - -{ - "included_files": [ - "folder1/file5", - "folder1/folder3/file3", - "file7" - ], - "excluded_files": [ - "folder1/folder2/file1", - "folder1/folder2/file2", - "folder1/folder3/file4", - "folder1/file5", - ] -} - -Files can only be included or excluded, not both. If you are confused about the state of a file make your best guess - and if you really arent sure then mark it as included. - -Respond in JSON and nothing else. 
- -## Examples - -Example 1: - -Input: - -.github: - ISSUE_TEMPLATE: - - bug_report.md - - feature_request.md - PULL_REQUEST_TEMPLATE: - - pull_request_template.md - # workflows: - # - ci.yml - # - release.yml - -Output: - -{ - "included_files": [ - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/PULL_REQUEST_TEMPLATE/pull_request_template.md" - ], - "excluded_files": [ - ".github/workflows/ci.yml", - ".github/workflows/release.yml" - ] -} - -Example 2: - -Input: - -source: - # controllers: - # - MainController.cs - # - AuthController.cs - models: - - User.cs - - Post.cs - views: - Home: - - Index.cshtml - # - About.cshtml - Auth: - - Login.cshtml - - Register.cshtml - (./): - - Dockerfile - -Output: - -{ - "included_files": [ - "source/models/User.cs", - "source/models/Post.cs", - "source/views/Home/Index.cshtml", - "source/views/Auth/Login.cshtml", - "source/views/Auth/Register.cshtml" - "source/Dockerfile", - ], - "excluded_files": [ - "source/controllers/MainController.cs", - "source/controllers/AuthController.cs", - "source/views/Home/About.cshtml" - ] -} - -Example 3: - -Input: - -src: - main: - java: - com: - example: - # controllers: - # - UserController.java - # - PostController.java - models: - - User.java - - Post.java - # repositories: - # - UserRepository.java - # - PostRepository.java - services: - - UserService.java - - PostService.java - resources: - - application.properties - test: - java: - com: - example: - controllers: - - UserControllerTest.java - - PostControllerTest.java - (./): - - pom.xml - - Dockerfile - -Output: - -{ - "included_files": [ - "src/main/java/com/example/models/User.java", - "src/main/java/com/example/models/Post.java", - "src/main/java/com/example/services/UserService.java", - "src/main/java/com/example/services/PostService.java", - "src/main/resources/application.properties", - "src/test/java/com/example/controllers/UserControllerTest.java", - "src/test/java/com/example/controllers/PostControllerTest.java", - "pom.xml", - "Dockerfile" - ], - "excluded_files": [ - "src/main/java/com/example/controllers/UserController.java", - "src/main/java/com/example/controllers/PostController.java", - "src/main/java/com/example/repositories/UserRepository.java", - "src/main/java/com/example/repositories/PostRepository.java" - ] -} - -Example 4: - -Input: - - -app: - # controllers: - # - application_controller.rb - # - users_controller.rb - models: - - user.rb - - post.rb - views: - layouts: - - application.html.erb - users: - - index.html.erb - - show.html.erb - posts: - - index.html.erb - # - show.html.erb - (./): - - Gemfile - - config -config: - environments: - - development.rb - - test.rb - # - production.rb - initializers: - - application_controller_renderer.rb - locales: - - en.yml - # routes.rb -db: - migrate: - - 20211025120523_create_users.rb - - 20211025120530_create_posts.rb -test: - fixtures: - - users.yml - - posts.yml - # controllers: - # - users_controller_test.rb - # - posts_controller_test.rb - models: - - user_test.rb - - post_test.rb - - -Output: - -{ - "included_files": [ - "app/models/user.rb", - "app/models/post.rb", - "app/views/layouts/application.html.erb", - "app/views/users/index.html.erb", - "app/views/users/show.html.erb", - "app/views/posts/index.html.erb", - "app/Gemfile", - "config/environments/development.rb", - "config/environments/test.rb", - "config/initializers/application_controller_renderer.rb", - "config/locales/en.yml", - "db/migrate/20211025120523_create_users.rb", - 
"db/migrate/20211025120530_create_posts.rb", - "test/fixtures/users.yml", - "test/fixtures/posts.yml", - "test/models/user_test.rb", - "test/models/post_test.rb" - ], - "excluded_files": [ - "app/controllers/application_controller.rb", - "app/controllers/users_controller.rb", - "app/views/posts/show.html.erb", - "config/environments/production.rb", - "config/routes.rb", - "test/controllers/users_controller_test.rb", - "test/controllers/posts_controller_test.rb" - ] -} - -## IMPORTANT -Remember any line that is commented is an excluded file. Any line that is NOT commented - is an included file. -""" - - # ai.llm.callbacks.clear() # silent - - messages = ai.start(system_prompt, yaml_string, step_name="fuzzy-parse-yaml") - - # ai.llm.callbacks.append(StreamingStdOutCallbackHandler()) - - json_string = messages[-1].content.strip() - - # strip anything before first { and after last } - json_string = json_string[json_string.find("{") : json_string.rfind("}") + 1] - - data = json.loads(json_string) - - return FileSelection(data["included_files"], data["excluded_files"]) diff --git a/gpt_engineer/applications/interactive_cli_loop/main.py b/gpt_engineer/applications/interactive_cli_loop/main.py deleted file mode 100644 index 0a92d843a2..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/main.py +++ /dev/null @@ -1,147 +0,0 @@ -import typer -from dotenv import load_dotenv -from pathlib import Path -from gpt_engineer.core.default.paths import memory_path - -from gpt_engineer.applications.interactive_cli_loop.agents.task_agent import TaskAgent -from gpt_engineer.applications.interactive_cli_loop.agents.feature_agent import ( - FeatureAgent, -) -from gpt_engineer.applications.interactive_cli_loop.agents.chat_agent import ChatAgent -from gpt_engineer.applications.interactive_cli_loop.feature import Feature -from gpt_engineer.applications.interactive_cli_loop.task import Task -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.domain import Settings -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector - - -from gpt_engineer.core.ai import AI - -app = typer.Typer() - - -@app.command() -def feature( - new: bool = typer.Option(False, "--new", "-n", help="Initialize a new feature."), - project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), - model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), - temperature: float = typer.Option( - 0.1, - "--temperature", - "-t", - help="Controls randomness: lower values for more focused, deterministic outputs.", - ), - azure_endpoint: str = typer.Option( - "", - "--azure", - "-a", - help="""Endpoint for your Azure OpenAI Service (https://xx.openai.azure.com). - In that case, the given model is the deployment name chosen in the Azure AI Studio.""", - ), - verbose: bool = typer.Option( - False, "--verbose", "-v", help="Enable verbose logging for debugging." - ), - debug: bool = typer.Option( - False, "--debug", "-d", help="Enable debug mode for debugging." - ), - no_branch: bool = typer.Option( - False, - "--no-branch", - "-nb", - help="Do not create a new feature branch for this work.", - ), -): - """ - Handle features in the project. 
- """ - load_dotenv() - - ai = AI( - model_name=model, - temperature=temperature, - ) - - repository = Repository(project_path) - - feature = Feature(project_path, repository) - - file_selector = FileSelector(project_path, repository) - - agent = FeatureAgent(ai, project_path, feature, repository, file_selector) - - settings = Settings(no_branch) - - if new: - agent.init(settings) - else: - agent.resume(settings) - - -@app.command() -def chat( - project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), - model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), - temperature: float = typer.Option( - 0.8, - "--temperature", - "-t", - help="Controls randomness: lower values for more focused, deterministic outputs.", - ), -): - """ - Initiate a chat about the current repository and feature context - """ - ai = AI( - model_name=model, - temperature=temperature, - ) - - repository = Repository(project_path) - - feature = Feature(project_path, repository) - - file_selector = FileSelector(project_path, repository) - - chat_agent = ChatAgent(ai, project_path, feature, repository, file_selector) - - chat_agent.start() - - -if __name__ == "__main__": - app() - - -@app.command() -def task( - project_path: str = typer.Option(".", "--path", "-p", help="Path to the project."), - model: str = typer.Option("gpt-4o", "--model", "-m", help="Model ID string."), - temperature: float = typer.Option( - 0.1, - "--temperature", - "-t", - help="Controls randomness: lower values for more focused, deterministic outputs.", - ), -): - """ - Implement a simple one off task without feature context - """ - load_dotenv() - - ai = AI( - model_name=model, - temperature=temperature, - ) - - repository = Repository(project_path) - - task = Task(project_path) - - file_selector = FileSelector(project_path, repository) - - task_agent = TaskAgent(ai, project_path, task, repository, file_selector) - - task_agent.run() - - # review - - # task.delete() diff --git a/gpt_engineer/applications/interactive_cli_loop/prompts/__init__.py b/gpt_engineer/applications/interactive_cli_loop/prompts/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser b/gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser deleted file mode 100644 index 75200759b1..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/prompts/fuzzy_file_parser +++ /dev/null @@ -1,320 +0,0 @@ -## Explanation -You are a fuzzy yaml parser, who correctly parses yaml even if it is not strictly valid. - -A user has been given a yaml representation of a file structure, represented like so: - -.github: - ISSUE_TEMPLATE: - - bug-report.md - - documentation-clarification.md - - feature-request.md - PULL_REQUEST_TEMPLATE: - - PULL_REQUEST_TEMPLATE.md - workflows: - - automation.yml - - ci.yaml - - pre-commit.yaml - - release.yaml - (./): - - CODEOWNERS - - CODE_OF_CONDUCT.md - - CONTRIBUTING.md - - FUNDING.yml - -Folders are represented as keys in a dictionary, files are items in a list. Any files listed under the (./) key can be assumed to be files of the folder above that. 
- -The given example maps to these file paths: - -".github/ISSUE_TEMPLATE/bug-report.md", -".github/ISSUE_TEMPLATE/documentation-clarification.md", -".github/ISSUE_TEMPLATE/feature-request.md", -".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", -".github/workflows/automation.yml", -".github/workflows/ci.yaml", -".github/workflows/pre-commit.yaml", -".github/workflows/release.yaml", -".github/CODEOWNERS", -".github/CODE_OF_CONDUCT.md", -".github/CONTRIBUTING.md", -".github/FUNDING.yml", - -An example of the yaml file after commenting might be something like this: - - -.github: - # ISSUE_TEMPLATE: - # - bug-report.md - # - documentation-clarification.md - # - feature-request.md - # PULL_REQUEST_TEMPLATE: - # - PULL_REQUEST_TEMPLATE.md - workflows: - - automation.yml - - ci.yaml - - pre-commit.yaml - - release.yaml - # (./): - # - CODEOWNERS - - CODE_OF_CONDUCT.md - - CONTRIBUTING.md - # - FUNDING.yml - - -This would convert into: - -{ - "included_files": [ - ".github/workflows/automation.yml", - ".github/workflows/ci.yaml", - ".github/workflows/pre-commit.yaml", - ".github/workflows/release.yaml", - ".github/CODE_OF_CONDUCT.md", - ".github/CONTRIBUTING.md" - ], - "excluded_files": [ - ".github/ISSUE_TEMPLATE/bug-report.md", - ".github/ISSUE_TEMPLATE/documentation-clarification.md", - ".github/ISSUE_TEMPLATE/feature-request.md", - ".github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md", - ".github/CODEOWNERS", - ".github/FUNDING.yml" - ] -} - - -Although the commmented content wasnt strictly correct yaml, their intentions were clear. They wanted to retain the files in the workflow folder aswell as the code of conduct and contributing guides - -Based on commented yaml inputs such as this, your job is to output JSON, indicating which files have been included and which have been excluded. - -Excluded files are always commented out with a # like in the above example. - -The json you should return will be like this: - -{ - "included_files": [ - "folder1/file5", - "folder1/folder3/file3", - "file7" - ], - "excluded_files": [ - "folder1/folder2/file1", - "folder1/folder2/file2", - "folder1/folder3/file4", - "folder1/file5", - ] -} - -Files can only be included or excluded, not both. If you are confused about the state of a file make your best guess - and if you really arent sure then mark it as included. - -Respond in JSON and nothing else. 
- -## Examples - -Example 1: - -Input: - -.github: - ISSUE_TEMPLATE: - - bug_report.md - - feature_request.md - PULL_REQUEST_TEMPLATE: - - pull_request_template.md - # workflows: - # - ci.yml - # - release.yml - -Output: - -{ - "included_files": [ - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/PULL_REQUEST_TEMPLATE/pull_request_template.md" - ], - "excluded_files": [ - ".github/workflows/ci.yml", - ".github/workflows/release.yml" - ] -} - -Example 2: - -Input: - -source: - # controllers: - # - MainController.cs - # - AuthController.cs - models: - - User.cs - - Post.cs - views: - Home: - - Index.cshtml - # - About.cshtml - Auth: - - Login.cshtml - - Register.cshtml - (./): - - Dockerfile - -Output: - -{ - "included_files": [ - "source/models/User.cs", - "source/models/Post.cs", - "source/views/Home/Index.cshtml", - "source/views/Auth/Login.cshtml", - "source/views/Auth/Register.cshtml" - "source/Dockerfile", - ], - "excluded_files": [ - "source/controllers/MainController.cs", - "source/controllers/AuthController.cs", - "source/views/Home/About.cshtml" - ] -} - -Example 3: - -Input: - -src: - main: - java: - com: - example: - # controllers: - # - UserController.java - # - PostController.java - models: - - User.java - - Post.java - # repositories: - # - UserRepository.java - # - PostRepository.java - services: - - UserService.java - - PostService.java - resources: - - application.properties - test: - java: - com: - example: - controllers: - - UserControllerTest.java - - PostControllerTest.java - (./): - - pom.xml - - Dockerfile - -Output: - -{ - "included_files": [ - "src/main/java/com/example/models/User.java", - "src/main/java/com/example/models/Post.java", - "src/main/java/com/example/services/UserService.java", - "src/main/java/com/example/services/PostService.java", - "src/main/resources/application.properties", - "src/test/java/com/example/controllers/UserControllerTest.java", - "src/test/java/com/example/controllers/PostControllerTest.java", - "pom.xml", - "Dockerfile" - ], - "excluded_files": [ - "src/main/java/com/example/controllers/UserController.java", - "src/main/java/com/example/controllers/PostController.java", - "src/main/java/com/example/repositories/UserRepository.java", - "src/main/java/com/example/repositories/PostRepository.java" - ] -} - -Example 4: - -Input: - - -app: - # controllers: - # - application_controller.rb - # - users_controller.rb - models: - - user.rb - - post.rb - views: - layouts: - - application.html.erb - users: - - index.html.erb - - show.html.erb - posts: - - index.html.erb - # - show.html.erb - (./): - - Gemfile - - config -config: - environments: - - development.rb - - test.rb - # - production.rb - initializers: - - application_controller_renderer.rb - locales: - - en.yml - # routes.rb -db: - migrate: - - 20211025120523_create_users.rb - - 20211025120530_create_posts.rb -test: - fixtures: - - users.yml - - posts.yml - # controllers: - # - users_controller_test.rb - # - posts_controller_test.rb - models: - - user_test.rb - - post_test.rb - - -Output: - -{ - "included_files": [ - "app/models/user.rb", - "app/models/post.rb", - "app/views/layouts/application.html.erb", - "app/views/users/index.html.erb", - "app/views/users/show.html.erb", - "app/views/posts/index.html.erb", - "app/Gemfile", - "config/environments/development.rb", - "config/environments/test.rb", - "config/initializers/application_controller_renderer.rb", - "config/locales/en.yml", - "db/migrate/20211025120523_create_users.rb", - 
"db/migrate/20211025120530_create_posts.rb", - "test/fixtures/users.yml", - "test/fixtures/posts.yml", - "test/models/user_test.rb", - "test/models/post_test.rb" - ], - "excluded_files": [ - "app/controllers/application_controller.rb", - "app/controllers/users_controller.rb", - "app/views/posts/show.html.erb", - "config/environments/production.rb", - "config/routes.rb", - "test/controllers/users_controller_test.rb", - "test/controllers/posts_controller_test.rb" - ] -} - -## IMPORTANT -Remember any line that is commented is an excluded file. Any line that is NOT commented - is an included file. \ No newline at end of file diff --git a/gpt_engineer/applications/interactive_cli_loop/repository.py b/gpt_engineer/applications/interactive_cli_loop/repository.py deleted file mode 100644 index f6d7d9c054..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/repository.py +++ /dev/null @@ -1,151 +0,0 @@ -from dataclasses import dataclass -from typing import List - -from git import GitCommandError, Repo - - -@dataclass -class Commit: - """ - Represents a single Git commit with a description and a diff. - """ - - description: str - diff: str - - def __str__(self) -> str: - diff_str = "\n".join(str(d) for d in self.diff) - return f"Commit Description: {self.description}\nDiff:\n{diff_str}" - - -@dataclass -class GitContext: - """ - Represents the Git context of an in progress feature. - """ - - commits: List[Commit] - branch_changes: str - staged_changes: str - unstaged_changes: str - tracked_files: List[str] - - -class Repository: - """ - Manages a git repository, providing functionalities to get repo status, - list files considering .gitignore, and interact with repository history. - """ - - def __init__(self, repo_path: str): - self.repo_path = repo_path - self.repo = Repo(repo_path) - assert not self.repo.bare - - def get_tracked_files(self) -> List[str]: - """ - List all files that are currently tracked by Git in the repository. - """ - try: - tracked_files = self.repo.git.ls_files().split("\n") - return tracked_files - except GitCommandError as e: - print(f"Error listing tracked files: {e}") - return [] - - def get_feature_branch_diff(self): - """ - Get a consolidated diff for the entire feature branch from its divergence point. - - Returns: - - str: The diff representing all changes from the feature branch since its divergence. - """ - current_branch = self.repo.active_branch - - # Get the tracking branch (e.g., 'origin/master') - tracking_branch = current_branch.tracking_branch() - if tracking_branch is None: - print("No tracking branch set, using 'master' as default base branch.") - tracking_branch = self.repo.heads.master # Fallback to 'master' - - try: - # Find the merge base between the current branch and the tracking branch or master - merge_base = self.repo.merge_base(tracking_branch, current_branch) - if merge_base: - merge_base = merge_base[ - 0 - ] # GitPython might return a list of merge bases - - # Generate the diff from the merge base to the latest commit of the feature branch - feature_diff = self.repo.git.diff( - f"{merge_base}..{current_branch}", unified=0 - ) - return feature_diff - except GitCommandError as e: - print(f"Error generating diff: {e}") - return "" - - def get_unstaged_changes(self): - """ - Get the unstaged changes in the repository. - - Returns - ------- - str - The unstaged changes in the repository. 
- """ - return self.repo.git.diff() - - def get_git_context(self): - staged_changes = self.repo.git.diff("--cached") - unstaged_changes = self.repo.git.diff() - current_branch = self.repo.active_branch - - commits = list(self.repo.iter_commits(rev=current_branch.name)) - - commit_objects = [ - Commit( - commit.summary, - ( - commit.diff(commit.parents[0], create_patch=True) - if commit.parents - else commit.diff(None, create_patch=True) - ), - ) - for commit in commits - ] - - branch_changes = self.get_feature_branch_diff() - - tracked_files = self.get_tracked_files() - - return GitContext( - commit_objects, - branch_changes, - staged_changes, - unstaged_changes, - tracked_files, - ) - - def create_branch(self, branch_name): - """ - Create a new branch in the repository. - - Parameters - ---------- - branch_name : str - The name of the new branch. - """ - self.repo.git.checkout("-b", branch_name) - - def stage_all_changes(self): - """ - Stage all changes in the repository. - """ - self.repo.git.add("--all") - - def undo_unstaged_changes(self): - """ - Undo all unstaged changes in the repository. - """ - self.repo.git.checkout("--", ".") diff --git a/gpt_engineer/applications/interactive_cli_loop/task.py b/gpt_engineer/applications/interactive_cli_loop/task.py deleted file mode 100644 index a4f46aa89c..0000000000 --- a/gpt_engineer/applications/interactive_cli_loop/task.py +++ /dev/null @@ -1,80 +0,0 @@ -import json -import os -import platform -import subprocess -import shutil -from pathlib import Path -from typing import Union - -from gpt_engineer.core.default.disk_memory import DiskMemory -from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.repository import Repository - - -class Task(DiskMemory): - """ - Represents a task that will be done one off without the wider context of a feature - """ - - def __init__(self, project_path: Union[str, Path]): - - self._task_path = Path(memory_path(project_path)) / "task" - self.path = self._task_path - self._task_filename = "task.md" - self._files_filename = "files.yml" - - if not os.path.exists(self._task_path): - os.makedirs(self._task_path) - - self.set_task("Please replace with task description") - - super().__init__(self._task_path) - - def delete(self): - shutil.rmtree(self._task_path) - - def set_task(self, task: str): - """ - Updates the task file with new text. - - Parameters - ---------- - task : str - The new task to write to the feature file. - """ - super().__setitem__(self._task_filename, task) - - def get_task(self) -> str: - """ - Retrieve the content of the task file in the database. - - Returns - ------- - str - The content of the feature file. - """ - return super().__getitem__(self._task_filename) - - def _file_path(self, filename): - return self._task_path / filename - - def _open_file_in_editor(self, path): - """ - Opens the generated YAML file in the default system editor. - If the YAML file is empty or doesn't exist, generate it first. - """ - - # Platform-specific methods to open the file - if platform.system() == "Windows": - os.startfile(path) - elif platform.system() == "Darwin": - subprocess.run(["open", path]) - else: # Linux and other Unix-like systems - subprocess.run(["xdg-open", path]) - - def open_task_in_editor(self): - """ - Opens the task file in the default system editor. 
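Going back to the Repository class above: the branch diff it feeds into the feature context hinges on the merge base, the commit where the feature branch diverged from its upstream. A stripped-down sketch of that idea with GitPython, assuming a non-bare checkout in the current directory and a local master branch to fall back to:

from git import Repo  # GitPython

repo = Repo(".")
branch = repo.active_branch

# Prefer the configured upstream; fall back to a local master branch if none is set.
upstream = branch.tracking_branch() or repo.heads.master

# The merge base is the commit where the feature branch diverged from its upstream.
merge_base = repo.merge_base(upstream, branch)[0]

# Everything committed on the branch since that divergence point.
branch_diff = repo.git.diff(f"{merge_base}..{branch}")
print(branch_diff)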
- """ - self._open_file_in_editor(self._file_path(self._task_filename)) From e2da9c7b51b6284fa94442071a4fbca13a9d7dd2 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 11 Jun 2024 11:09:58 +0100 Subject: [PATCH 34/36] feature cli --- .../__init__.py | 0 .../agents/__init__.py | 0 .../agents/agent_steps.py | 8 +-- .../agents/chat_agent.py | 12 ++--- .../agents/feature_agent.py | 8 +-- .../domain.py | 0 .../applications/feature_cli/example_project | 1 + .../feature.py | 0 .../file_selection.py | 0 .../{interactive_cli => feature_cli}/files.py | 0 .../generation_tools.py | 0 .../{interactive_cli => feature_cli}/main.py | 14 +++--- .../prompts/__init__.py | 0 .../prompts/fuzzy_file_parser | 0 .../repository.py | 0 .../{interactive_cli => feature_cli}/task.py | 0 .../example_project/.gitignore | 4 -- .../interactive_cli/example_project/README.md | 8 --- .../example_project/index.html | 42 ---------------- .../example_project/styles.css | 49 ------------------- pyproject.toml | 2 +- 21 files changed, 23 insertions(+), 125 deletions(-) rename gpt_engineer/applications/{interactive_cli => feature_cli}/__init__.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/agents/__init__.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/agents/agent_steps.py (97%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/agents/chat_agent.py (81%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/agents/feature_agent.py (89%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/domain.py (100%) create mode 160000 gpt_engineer/applications/feature_cli/example_project rename gpt_engineer/applications/{interactive_cli => feature_cli}/feature.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/file_selection.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/files.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/generation_tools.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/main.py (85%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/prompts/__init__.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/prompts/fuzzy_file_parser (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/repository.py (100%) rename gpt_engineer/applications/{interactive_cli => feature_cli}/task.py (100%) delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/.gitignore delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/README.md delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/index.html delete mode 100644 gpt_engineer/applications/interactive_cli/example_project/styles.css diff --git a/gpt_engineer/applications/interactive_cli/__init__.py b/gpt_engineer/applications/feature_cli/__init__.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/__init__.py rename to gpt_engineer/applications/feature_cli/__init__.py diff --git a/gpt_engineer/applications/interactive_cli/agents/__init__.py b/gpt_engineer/applications/feature_cli/agents/__init__.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/agents/__init__.py rename to gpt_engineer/applications/feature_cli/agents/__init__.py diff --git a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py b/gpt_engineer/applications/feature_cli/agents/agent_steps.py similarity index 97% rename from 
gpt_engineer/applications/interactive_cli/agents/agent_steps.py rename to gpt_engineer/applications/feature_cli/agents/agent_steps.py index e4e268e9ec..9fe5ef4d9d 100644 --- a/gpt_engineer/applications/interactive_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/feature_cli/agents/agent_steps.py @@ -1,10 +1,10 @@ -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli.repository import ( +from gpt_engineer.applications.feature_cli.feature import Feature +from gpt_engineer.applications.feature_cli.file_selection import FileSelector +from gpt_engineer.applications.feature_cli.repository import ( Repository, GitContext, ) -from gpt_engineer.applications.interactive_cli.generation_tools import ( +from gpt_engineer.applications.feature_cli.generation_tools import ( generate_branch_name, build_feature_context_string, generate_suggested_tasks, diff --git a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py b/gpt_engineer/applications/feature_cli/agents/chat_agent.py similarity index 81% rename from gpt_engineer/applications/interactive_cli/agents/chat_agent.py rename to gpt_engineer/applications/feature_cli/agents/chat_agent.py index 247d5be7ba..b7cad145a9 100644 --- a/gpt_engineer/applications/interactive_cli/agents/chat_agent.py +++ b/gpt_engineer/applications/feature_cli/agents/chat_agent.py @@ -1,13 +1,13 @@ from gpt_engineer.core.ai import AI, HumanMessage, SystemMessage -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.files import Files -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( +from gpt_engineer.applications.feature_cli.feature import Feature +from gpt_engineer.applications.feature_cli.repository import Repository +from gpt_engineer.applications.feature_cli.files import Files +from gpt_engineer.applications.feature_cli.file_selection import FileSelector +from gpt_engineer.applications.feature_cli.agents.agent_steps import ( update_user_file_selection, ) -from gpt_engineer.applications.interactive_cli.generation_tools import ( +from gpt_engineer.applications.feature_cli.generation_tools import ( build_files_context_string, ) diff --git a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py b/gpt_engineer/applications/feature_cli/agents/feature_agent.py similarity index 89% rename from gpt_engineer/applications/interactive_cli/agents/feature_agent.py rename to gpt_engineer/applications/feature_cli/agents/feature_agent.py index b7797d8468..df16f4f2d9 100644 --- a/gpt_engineer/applications/interactive_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/feature_cli/agents/feature_agent.py @@ -1,7 +1,7 @@ -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli.agents.agent_steps import ( +from gpt_engineer.applications.feature_cli.feature import Feature +from gpt_engineer.applications.feature_cli.repository import Repository +from gpt_engineer.applications.feature_cli.file_selection import FileSelector +from 
gpt_engineer.applications.feature_cli.agents.agent_steps import ( initialize_new_feature, update_user_file_selection, print_feature_state, diff --git a/gpt_engineer/applications/interactive_cli/domain.py b/gpt_engineer/applications/feature_cli/domain.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/domain.py rename to gpt_engineer/applications/feature_cli/domain.py diff --git a/gpt_engineer/applications/feature_cli/example_project b/gpt_engineer/applications/feature_cli/example_project new file mode 160000 index 0000000000..b22fbe6c76 --- /dev/null +++ b/gpt_engineer/applications/feature_cli/example_project @@ -0,0 +1 @@ +Subproject commit b22fbe6c760ac196edacdfb508ad300d033e12d9 diff --git a/gpt_engineer/applications/interactive_cli/feature.py b/gpt_engineer/applications/feature_cli/feature.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/feature.py rename to gpt_engineer/applications/feature_cli/feature.py diff --git a/gpt_engineer/applications/interactive_cli/file_selection.py b/gpt_engineer/applications/feature_cli/file_selection.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/file_selection.py rename to gpt_engineer/applications/feature_cli/file_selection.py diff --git a/gpt_engineer/applications/interactive_cli/files.py b/gpt_engineer/applications/feature_cli/files.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/files.py rename to gpt_engineer/applications/feature_cli/files.py diff --git a/gpt_engineer/applications/interactive_cli/generation_tools.py b/gpt_engineer/applications/feature_cli/generation_tools.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/generation_tools.py rename to gpt_engineer/applications/feature_cli/generation_tools.py diff --git a/gpt_engineer/applications/interactive_cli/main.py b/gpt_engineer/applications/feature_cli/main.py similarity index 85% rename from gpt_engineer/applications/interactive_cli/main.py rename to gpt_engineer/applications/feature_cli/main.py index 3ef8e4d50a..661c5e9c1e 100644 --- a/gpt_engineer/applications/interactive_cli/main.py +++ b/gpt_engineer/applications/feature_cli/main.py @@ -2,15 +2,15 @@ from dotenv import load_dotenv -from gpt_engineer.applications.interactive_cli.agents.feature_agent import ( +from gpt_engineer.applications.feature_cli.agents.feature_agent import ( FeatureAgent, ) -from gpt_engineer.applications.interactive_cli.agents.chat_agent import ChatAgent -from gpt_engineer.applications.interactive_cli.feature import Feature -from gpt_engineer.applications.interactive_cli.task import Task -from gpt_engineer.applications.interactive_cli.repository import Repository -from gpt_engineer.applications.interactive_cli.domain import Settings -from gpt_engineer.applications.interactive_cli.file_selection import FileSelector +from gpt_engineer.applications.feature_cli.agents.chat_agent import ChatAgent +from gpt_engineer.applications.feature_cli.feature import Feature +from gpt_engineer.applications.feature_cli.task import Task +from gpt_engineer.applications.feature_cli.repository import Repository +from gpt_engineer.applications.feature_cli.domain import Settings +from gpt_engineer.applications.feature_cli.file_selection import FileSelector from gpt_engineer.core.ai import AI diff --git a/gpt_engineer/applications/interactive_cli/prompts/__init__.py b/gpt_engineer/applications/feature_cli/prompts/__init__.py similarity index 100% rename from 
gpt_engineer/applications/interactive_cli/prompts/__init__.py rename to gpt_engineer/applications/feature_cli/prompts/__init__.py diff --git a/gpt_engineer/applications/interactive_cli/prompts/fuzzy_file_parser b/gpt_engineer/applications/feature_cli/prompts/fuzzy_file_parser similarity index 100% rename from gpt_engineer/applications/interactive_cli/prompts/fuzzy_file_parser rename to gpt_engineer/applications/feature_cli/prompts/fuzzy_file_parser diff --git a/gpt_engineer/applications/interactive_cli/repository.py b/gpt_engineer/applications/feature_cli/repository.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/repository.py rename to gpt_engineer/applications/feature_cli/repository.py diff --git a/gpt_engineer/applications/interactive_cli/task.py b/gpt_engineer/applications/feature_cli/task.py similarity index 100% rename from gpt_engineer/applications/interactive_cli/task.py rename to gpt_engineer/applications/feature_cli/task.py diff --git a/gpt_engineer/applications/interactive_cli/example_project/.gitignore b/gpt_engineer/applications/interactive_cli/example_project/.gitignore deleted file mode 100644 index d3c1171768..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.feature - -.gpteng -ignored_test diff --git a/gpt_engineer/applications/interactive_cli/example_project/README.md b/gpt_engineer/applications/interactive_cli/example_project/README.md deleted file mode 100644 index 0fe9092f35..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# Local Bakery Website - -Try this feature improvement - -Feature: I want to create a feedback form on the website to collect user feedback - -Task: Create a local sql lite database for development. Initialize it with a feedback table. The table can store a comment and a rating out of 5. - diff --git a/gpt_engineer/applications/interactive_cli/example_project/index.html b/gpt_engineer/applications/interactive_cli/example_project/index.html deleted file mode 100644 index 1dcaa1d780..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/index.html +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - Sweet Treats Bakery - - - -
-    <header>
-        <h1>Welcome to Sweet Treats Bakery</h1>
-    </header>
-
-    <section>
-        <h2>About Us</h2>
-        <p>Family-owned bakery serving homemade treats since 1998. We pride ourselves on using local ingredients.</p>
-    </section>
-
-    <section>
-        <h2>Contact Us</h2>
-        <p>Visit us or send a message!</p>
-        <p>Address: 123 Baking St, Foodtown, TX</p>
-        <p>Email: contact@sweettreatsbakery.com</p>
-    </section>
-
-    <footer>
-        <p>Thank you for visiting our website! Follow us on social media for updates.</p>
-    </footer>
- - diff --git a/gpt_engineer/applications/interactive_cli/example_project/styles.css b/gpt_engineer/applications/interactive_cli/example_project/styles.css deleted file mode 100644 index 34594dbb21..0000000000 --- a/gpt_engineer/applications/interactive_cli/example_project/styles.css +++ /dev/null @@ -1,49 +0,0 @@ -body { - font-family: 'Arial', sans-serif; - line-height: 1.6; - margin: 0; - padding: 0; - background: #f4f4f4; - color: #333; - display: flex; - flex-direction: column; -} - -header { - background: #c0392b; - color: #fff; - padding: 10px 20px; - text-align: center; -} - -header nav ul { - list-style: none; - padding: 0; -} - -header nav ul li { - display: inline; - margin-left: 10px; -} - -section { - margin: 20px; - padding: 20px; - background: #fff; -} - -footer { - text-align: center; - padding: 10px 20px; - background: #333; - color: #fff; -} - -a { - color: white; - text-decoration: none; -} - -a:hover { - text-decoration: underline; -} diff --git a/pyproject.toml b/pyproject.toml index 8512635977..9b4ac49ae3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,7 +67,7 @@ sphinx_copybutton = ">=0.5.2" gpt-engineer = 'gpt_engineer.applications.cli.main:app' ge = 'gpt_engineer.applications.cli.main:app' gpte = 'gpt_engineer.applications.cli.main:app' -gptf = 'gpt_engineer.applications.interactive_cli.main:app' +gptf = 'gpt_engineer.applications.feature_cli.main:app' bench = 'gpt_engineer.benchmark.__main__:app' gpte_test_application = 'tests.caching_main:app' From d93ca360256be421821da49f2e5a123741049665 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 11 Jun 2024 14:01:55 +0100 Subject: [PATCH 35/36] hmmm --- .feature/files.yml | 4 + .gitignore | 4 - .../applications/feature_cli/example_project | 1 - .../applications/feature_cli/feature.py | 6 +- .../feature_cli/file_selection.py | 10 +-- .../feature_cli/generation_tools.py | 2 +- gpt_engineer/applications/feature_cli/main.py | 1 - .../applications/feature_cli/repository.py | 19 ++++- gpt_engineer/applications/feature_cli/task.py | 80 ------------------- .../__init__.py | 0 .../test_file_selection.py | 4 +- 11 files changed, 32 insertions(+), 99 deletions(-) create mode 100644 .feature/files.yml delete mode 160000 gpt_engineer/applications/feature_cli/example_project delete mode 100644 gpt_engineer/applications/feature_cli/task.py rename tests/applications/{interactive_cli => feature_cli}/__init__.py (100%) rename tests/applications/{interactive_cli => feature_cli}/test_file_selection.py (97%) diff --git a/.feature/files.yml b/.feature/files.yml new file mode 100644 index 0000000000..7cbb69d7f2 --- /dev/null +++ b/.feature/files.yml @@ -0,0 +1,4 @@ +# Uncomment any files you would like to use for this feature +# Note that (./) is a special key which represents files at the root of the parent directory + +{} diff --git a/.gitignore b/.gitignore index 7f731cfe72..a523e7dbe4 100644 --- a/.gitignore +++ b/.gitignore @@ -95,10 +95,6 @@ gpt_engineer/benchmark/benchmarks/mbpp/dataset prompt - -.feature -.task - gpt_engineer/benchmark/minimal_bench_config.toml test.json diff --git a/gpt_engineer/applications/feature_cli/example_project b/gpt_engineer/applications/feature_cli/example_project deleted file mode 160000 index b22fbe6c76..0000000000 --- a/gpt_engineer/applications/feature_cli/example_project +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b22fbe6c760ac196edacdfb508ad300d033e12d9 diff --git a/gpt_engineer/applications/feature_cli/feature.py b/gpt_engineer/applications/feature_cli/feature.py index 
52b11917c5..f8370bfbb3 100644 --- a/gpt_engineer/applications/feature_cli/feature.py +++ b/gpt_engineer/applications/feature_cli/feature.py @@ -7,8 +7,8 @@ from gpt_engineer.core.default.disk_memory import DiskMemory from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.repository import Repository +from gpt_engineer.applications.feature_cli.file_selection import FileSelector +from gpt_engineer.applications.feature_cli.repository import Repository class Feature(DiskMemory): @@ -22,7 +22,7 @@ class Feature(DiskMemory): def __init__(self, project_path: Union[str, Path], repository: Repository): - self._feature_path = Path(memory_path(project_path)) / "feature" + self._feature_path = Path(project_path) / ".feature" self.path = self._feature_path self._feature_filename = "feature.md" self._progress_filename = "progress.json" diff --git a/gpt_engineer/applications/feature_cli/file_selection.py b/gpt_engineer/applications/feature_cli/file_selection.py index 6ccfeed17c..39b3979c3a 100644 --- a/gpt_engineer/applications/feature_cli/file_selection.py +++ b/gpt_engineer/applications/feature_cli/file_selection.py @@ -8,12 +8,12 @@ from gpt_engineer.core.default.paths import memory_path from gpt_engineer.core.ai import AI -from gpt_engineer.applications.interactive_cli_loop.repository import Repository -from gpt_engineer.applications.interactive_cli_loop.files import Files -from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( +from gpt_engineer.applications.feature_cli.repository import Repository +from gpt_engineer.applications.feature_cli.files import Files +from gpt_engineer.applications.feature_cli.generation_tools import ( fuzzy_parse_file_selection, ) -from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection +from gpt_engineer.applications.feature_cli.domain import FileSelection def paths_to_tree(paths): @@ -159,7 +159,7 @@ def __init__(self, project_path: str, repository: Repository): self.project_path = project_path self.ai = AI("gpt-4o", temperature=0) self.repository = repository - self.yaml_path = Path(memory_path(project_path)) / "files.yml" + self.yaml_path = Path(project_path) / ".feature" / "files.yml" if os.path.exists(self.yaml_path): return diff --git a/gpt_engineer/applications/feature_cli/generation_tools.py b/gpt_engineer/applications/feature_cli/generation_tools.py index 4d7582fdce..9afabd79f6 100644 --- a/gpt_engineer/applications/feature_cli/generation_tools.py +++ b/gpt_engineer/applications/feature_cli/generation_tools.py @@ -1,7 +1,7 @@ import xml.etree.ElementTree as ET import json -from gpt_engineer.applications.interactive_cli_loop.domain import FileSelection +from gpt_engineer.applications.feature_cli.domain import FileSelection from gpt_engineer.core.ai import AI from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler diff --git a/gpt_engineer/applications/feature_cli/main.py b/gpt_engineer/applications/feature_cli/main.py index 661c5e9c1e..cd5c1a0c6a 100644 --- a/gpt_engineer/applications/feature_cli/main.py +++ b/gpt_engineer/applications/feature_cli/main.py @@ -7,7 +7,6 @@ ) from gpt_engineer.applications.feature_cli.agents.chat_agent import ChatAgent from gpt_engineer.applications.feature_cli.feature import Feature -from gpt_engineer.applications.feature_cli.task import Task from gpt_engineer.applications.feature_cli.repository import Repository from 
gpt_engineer.applications.feature_cli.domain import Settings from gpt_engineer.applications.feature_cli.file_selection import FileSelector diff --git a/gpt_engineer/applications/feature_cli/repository.py b/gpt_engineer/applications/feature_cli/repository.py index f6d7d9c054..7c5b4e798e 100644 --- a/gpt_engineer/applications/feature_cli/repository.py +++ b/gpt_engineer/applications/feature_cli/repository.py @@ -44,11 +44,26 @@ def __init__(self, repo_path: str): def get_tracked_files(self) -> List[str]: """ - List all files that are currently tracked by Git in the repository. + List all files that are currently tracked by Git in the repository, + ignoring submodules. """ try: + # Get all tracked files tracked_files = self.repo.git.ls_files().split("\n") - return tracked_files + + # Get the list of submodule paths + submodule_paths = self.repo.git.submodule( + "foreach", "--quiet", "echo $sm_path" + ).split("\n") + + # Filter out submodule paths from tracked files + filtered_files = [ + file + for file in tracked_files + if not any(file.startswith(submodule) for submodule in submodule_paths) + ] + + return filtered_files except GitCommandError as e: print(f"Error listing tracked files: {e}") return [] diff --git a/gpt_engineer/applications/feature_cli/task.py b/gpt_engineer/applications/feature_cli/task.py deleted file mode 100644 index a4f46aa89c..0000000000 --- a/gpt_engineer/applications/feature_cli/task.py +++ /dev/null @@ -1,80 +0,0 @@ -import json -import os -import platform -import subprocess -import shutil -from pathlib import Path -from typing import Union - -from gpt_engineer.core.default.disk_memory import DiskMemory -from gpt_engineer.core.default.paths import memory_path -from gpt_engineer.applications.interactive_cli_loop.file_selection import FileSelector -from gpt_engineer.applications.interactive_cli_loop.repository import Repository - - -class Task(DiskMemory): - """ - Represents a task that will be done one off without the wider context of a feature - """ - - def __init__(self, project_path: Union[str, Path]): - - self._task_path = Path(memory_path(project_path)) / "task" - self.path = self._task_path - self._task_filename = "task.md" - self._files_filename = "files.yml" - - if not os.path.exists(self._task_path): - os.makedirs(self._task_path) - - self.set_task("Please replace with task description") - - super().__init__(self._task_path) - - def delete(self): - shutil.rmtree(self._task_path) - - def set_task(self, task: str): - """ - Updates the task file with new text. - - Parameters - ---------- - task : str - The new task to write to the feature file. - """ - super().__setitem__(self._task_filename, task) - - def get_task(self) -> str: - """ - Retrieve the content of the task file in the database. - - Returns - ------- - str - The content of the feature file. - """ - return super().__getitem__(self._task_filename) - - def _file_path(self, filename): - return self._task_path / filename - - def _open_file_in_editor(self, path): - """ - Opens the generated YAML file in the default system editor. - If the YAML file is empty or doesn't exist, generate it first. - """ - - # Platform-specific methods to open the file - if platform.system() == "Windows": - os.startfile(path) - elif platform.system() == "Darwin": - subprocess.run(["open", path]) - else: # Linux and other Unix-like systems - subprocess.run(["xdg-open", path]) - - def open_task_in_editor(self): - """ - Opens the task file in the default system editor. 
- """ - self._open_file_in_editor(self._file_path(self._task_filename)) diff --git a/tests/applications/interactive_cli/__init__.py b/tests/applications/feature_cli/__init__.py similarity index 100% rename from tests/applications/interactive_cli/__init__.py rename to tests/applications/feature_cli/__init__.py diff --git a/tests/applications/interactive_cli/test_file_selection.py b/tests/applications/feature_cli/test_file_selection.py similarity index 97% rename from tests/applications/interactive_cli/test_file_selection.py rename to tests/applications/feature_cli/test_file_selection.py index 19a655c191..2e9cd514fa 100644 --- a/tests/applications/interactive_cli/test_file_selection.py +++ b/tests/applications/feature_cli/test_file_selection.py @@ -4,7 +4,7 @@ from gpt_engineer.core.ai import AI -from gpt_engineer.applications.interactive_cli_loop.file_selection import ( +from gpt_engineer.applications.feature_cli.file_selection import ( FileSelection, paths_to_tree, tree_to_paths, @@ -13,7 +13,7 @@ commented_yaml_to_file_selection, ) -from gpt_engineer.applications.interactive_cli_loop.generation_tools import ( +from gpt_engineer.applications.feature_cli.generation_tools import ( fuzzy_parse_file_selection, ) From 59e49431907b907bcfbc10d5588a3f33890651a1 Mon Sep 17 00:00:00 2001 From: Theo McCabe Date: Tue, 11 Jun 2024 22:14:13 +0100 Subject: [PATCH 36/36] chat agent working --- .feature/files.yml | 251 +++++++++++++++++- .gitignore | 3 + .../feature_cli/agents/agent_steps.py | 39 +++ .../feature_cli/agents/chat_agent.py | 15 +- .../feature_cli/agents/feature_agent.py | 7 +- .../feature_cli/agents/simple_task_agent.py | 94 +++++++ .../applications/feature_cli/feature.py | 17 +- .../applications/feature_cli/repository.py | 19 +- gpt_engineer/applications/feature_cli/task.py | 75 ++++++ 9 files changed, 490 insertions(+), 30 deletions(-) create mode 100644 gpt_engineer/applications/feature_cli/agents/simple_task_agent.py create mode 100644 gpt_engineer/applications/feature_cli/task.py diff --git a/.feature/files.yml b/.feature/files.yml index 7cbb69d7f2..78fa862183 100644 --- a/.feature/files.yml +++ b/.feature/files.yml @@ -1,4 +1,253 @@ # Uncomment any files you would like to use for this feature # Note that (./) is a special key which represents files at the root of the parent directory -{} +.feature: + #- files.yml +.github: + ISSUE_TEMPLATE: + #- bug-report.md + #- documentation-clarification.md + #- feature-request.md + PULL_REQUEST_TEMPLATE: + #- PULL_REQUEST_TEMPLATE.md + workflows: + #- automation.yml + #- ci.yaml + #- pre-commit.yaml + #- release.yaml + (./): + #- CODEOWNERS + #- CODE_OF_CONDUCT.md + #- CONTRIBUTING.md + #- FUNDING.yml +docker: + #- Dockerfile + #- README.md + #- entrypoint.sh +docs: + examples: + open_llms: + #- README.md + #- langchain_interface.py + #- openai_api_interface.py + (./): + #- Makefile + #- api_reference.rst + #- code_conduct_link.rst + #- conf.py + #- contributing_link.rst + #- create_api_rst.py + #- disclaimer_link.rst + #- docs_building.md + #- index.rst + #- installation.rst + #- introduction.md + #- make.bat + #- open_models.md + #- quickstart.rst + #- roadmap_link.rst + #- terms_link.rst + #- tracing_debugging.md + #- windows_readme_link.rst +gpt_engineer: + applications: + cli: + #- __init__.py + #- cli_agent.py + #- collect.py + #- file_selector.py + #- learning.py + #- main.py + feature_cli: + agents: + - __init__.py + - agent_steps.py + - chat_agent.py + - feature_agent.py + prompts: + - __init__.py + - fuzzy_file_parser + (./): + - 
__init__.py + - domain.py + - feature.py + - file_selection.py + - files.py + - generation_tools.py + - main.py + - repository.py + (./): + #- __init__.py + benchmark: + benchmarks: + apps: + #- load.py + #- problem.py + #- problems.py + gptme: + #- load.py + mbpp: + #- load.py + #- problem.py + #- problems.py + (./): + #- load.py + (./): + #- __init__.py + #- __main__.py + #- bench_config.py + #- default_bench_config.toml + #- run.py + #- types.py + core: + default: + #- __init__.py + #- constants.py + #- disk_execution_env.py + #- disk_memory.py + #- file_store.py + #- paths.py + #- simple_agent.py + #- steps.py + (./): + #- __init__.py + #- ai.py + #- base_agent.py + #- base_execution_env.py + #- base_memory.py + #- chat_to_files.py + #- diff.py + #- files_dict.py + #- git.py + #- linting.py + #- preprompts_holder.py + #- project_config.py + #- prompt.py + #- token_usage.py + #- version_manager.py + preprompts: + #- clarify + #- entrypoint + #- file_format + #- file_format_diff + #- file_format_fix + #- generate + #- improve + #- philosophy + #- roadmap + tools: + #- __init__.py + #- custom_steps.py + #- supported_languages.py + (./): + #- __init__.py +projects: + example: + #- prompt + example-improve: + #- README.md + #- controller.py + #- main.py + #- model.py + #- prompt + #- requirements.txt + #- run.sh + #- view.py + example-vision: + images: + #- ux_diagram.png + (./): + #- navigation.html + #- prompt +scripts: + #- clean_benchmarks.py + #- legacy_benchmark.py + #- print_chat.py + #- test_api.py +tests: + applications: + cli: + #- __init__.py + #- test_cli_agent.py + #- test_collect.py + #- test_collection_consent.py + #- test_learning.py + #- test_main.py + feature_cli: + #- __init__.py + #- test_file_selection.py + (./): + #- __init__.py + benchmark: + #- test_BenchConfig.py + core: + default: + #- __init__.py + #- test_disk_execution_env.py + #- test_disk_file_repository.py + #- test_simple_agent.py + #- test_steps.py + improve_function_test_cases: + #- apps_benchmark_6_chat + #- apps_benchmark_6_code + #- apps_benchmark_6_v2_chat + #- apps_benchmark_6_v2_code + #- controller_chat + #- controller_code + #- corrected_diff_from_missing_lines + #- create_two_new_files_chat + #- create_two_new_files_code + #- simple_calculator_chat + #- simple_calculator_code + #- task_master_chat + #- task_master_code + #- temperature_converter_chat + #- temperature_converter_code + #- theo_case_chat + #- theo_case_code + #- vgvishesh_example_2_chat + #- vgvishesh_example_2_code + #- vgvishesh_example_chat + #- vgvishesh_example_code + #- wheaties_example_chat + #- wheaties_example_code + (./): + #- __init__.py + #- test_ai.py + #- test_chat_to_files.py + #- test_git.py + #- test_salvage_correct_hunks.py + #- test_token_usage.py + test_data: + #- mona_lisa.jpg + tools: + #- example_snake_files.py + (./): + #- __init__.py + #- ai_cache.json + #- mock_ai.py + #- test_install.py + #- test_project_config.py +(./): +#- .dockerignore +#- .env.template +#- .gitignore +#- .pre-commit-config.yaml +#- .readthedocs.yaml +#- Acknowledgements.md +#- DISCLAIMER.md +#- GOVERNANCE.md +#- LICENSE +#- MANIFEST.in +#- Makefile +#- README.md +#- ROADMAP.md +#- TERMS_OF_USE.md +#- WINDOWS_README.md +#- citation.cff +#- docker-compose.yml +#- poetry.lock +#- pyproject.toml +#- quicktest.py +#- sweep.yaml +#- tox.ini diff --git a/.gitignore b/.gitignore index a523e7dbe4..81c4a87511 100644 --- a/.gitignore +++ b/.gitignore @@ -95,6 +95,9 @@ gpt_engineer/benchmark/benchmarks/mbpp/dataset prompt +.feature +.task + 
gpt_engineer/benchmark/minimal_bench_config.toml test.json diff --git a/gpt_engineer/applications/feature_cli/agents/agent_steps.py b/gpt_engineer/applications/feature_cli/agents/agent_steps.py index 9fe5ef4d9d..3c9067ba4d 100644 --- a/gpt_engineer/applications/feature_cli/agents/agent_steps.py +++ b/gpt_engineer/applications/feature_cli/agents/agent_steps.py @@ -219,6 +219,12 @@ def initiate_new_task(ai, feature, git_context, file_selector): return +def get_git_context(repository): + with yaspin(text="Gathering git context...") as spinner: + git_context = repository.get_git_context() + spinner.ok("✔") + + def suggest_new_tasks(ai, feature, git_context, file_selector): files = file_selector.get_included_as_file_repository() @@ -501,3 +507,36 @@ def review_changes( if result == "x": print("exiting...") return + + +def confirm_chat_feature(): + + completer = WordCompleter(["1", "2", "3", "4", "5", "x"], ignore_case=True) + session = InputSession() + + result = session.prompt( + HTML( + """Active Feature Detected + +Would you like to: + +1 - Chat with feaure context and code +2 - Chat with code only + +x - Exit + +""" + ), + completer=completer, + ).lower() + + print() + + if result == "1": + return True + if result == "2": + return False + + if result == "x": + print("exiting...") + return diff --git a/gpt_engineer/applications/feature_cli/agents/chat_agent.py b/gpt_engineer/applications/feature_cli/agents/chat_agent.py index b7cad145a9..34b7f51881 100644 --- a/gpt_engineer/applications/feature_cli/agents/chat_agent.py +++ b/gpt_engineer/applications/feature_cli/agents/chat_agent.py @@ -6,6 +6,8 @@ from gpt_engineer.applications.feature_cli.file_selection import FileSelector from gpt_engineer.applications.feature_cli.agents.agent_steps import ( update_user_file_selection, + confirm_chat_feature, + get_git_context, ) from gpt_engineer.applications.feature_cli.generation_tools import ( build_files_context_string, @@ -36,9 +38,16 @@ def start(self): files = Files(self.project_path, selected_files) - context_string = build_files_context_string( - self.feature, self.repository.get_git_context(), files - ) + context_string = f"Files from code repository:\n\n{files.to_chat()}" + + if self.feature.has_description(): + with_feature = confirm_chat_feature() + + if with_feature: + git_context = get_git_context(self.repository) + context_string = build_files_context_string( + self.feature, git_context, files + ) system = f"""You are the chat function of an AI software engineering tool called gpt engineer. diff --git a/gpt_engineer/applications/feature_cli/agents/feature_agent.py b/gpt_engineer/applications/feature_cli/agents/feature_agent.py index df16f4f2d9..a81216adbe 100644 --- a/gpt_engineer/applications/feature_cli/agents/feature_agent.py +++ b/gpt_engineer/applications/feature_cli/agents/feature_agent.py @@ -11,6 +11,7 @@ review_changes, check_existing_task, check_for_unstaged_changes, + get_git_context, ) # Bottom comment for testing! 
@@ -63,7 +64,7 @@ def run_task(self): or -Run gptf --no-feature to implement task without a feature""" +Run gptf task --no-feature to implement task without a feature""" ) return @@ -74,9 +75,7 @@ def run_task(self): return while True: - with yaspin(text="Gathering git context...") as spinner: - git_context = self.repository.get_git_context() - spinner.ok("✔") + git_context = get_git_context(self.repository) if not self.feature.has_task(): initiate_new_task( diff --git a/gpt_engineer/applications/feature_cli/agents/simple_task_agent.py b/gpt_engineer/applications/feature_cli/agents/simple_task_agent.py new file mode 100644 index 0000000000..51c5e1e0c7 --- /dev/null +++ b/gpt_engineer/applications/feature_cli/agents/simple_task_agent.py @@ -0,0 +1,94 @@ +from gpt_engineer.applications.feature_cli.task import Task +from gpt_engineer.applications.feature_cli.repository import Repository +from gpt_engineer.applications.feature_cli.files import Files +from gpt_engineer.applications.feature_cli.file_selection import FileSelector +from gpt_engineer.applications.feature_cli.agents.agent_steps import ( + adjust_prompt_files, + check_for_unstaged_changes, + update_user_file_selection, +) + +from gpt_engineer.core.ai import AI +from gpt_engineer.core.prompt import Prompt +from gpt_engineer.core.default.steps import improve_fn, handle_improve_mode +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import PREPROMPTS_PATH, memory_path +from gpt_engineer.core.preprompts_holder import PrepromptsHolder + +from prompt_toolkit import prompt as cli_input + + +class TaskAgent: + """ + A cli agent which implements a one off task + """ + + def __init__( + self, + ai: AI, + project_path: str, + task: Task, + repository: Repository, + file_selector: FileSelector, + ): + self.ai = ai + self.project_path = project_path + self.task = task + self.repository = repository + self.file_selector = file_selector + + def _confirm__task_with_user(self): + file_selector = self.file_selector + file_selector.update_yaml_from_tracked_files() + file_string = file_selector.get_pretty_selected_from_yaml() + + task = self.task.get_task() + + print(f"Files: \n\nrepo\n{file_string}\n\n") + print(f"Task: {task}\n\n") + + # do you want to attempt this task? + if cli_input("Do you want to implement this task? 
y/n: ").lower() in [ + "y", + "yes", + ]: + return True + + return False + + def _run_improve_mode(self): + memory = DiskMemory(memory_path(self.project_path)) + preprompts_holder = PrepromptsHolder(PREPROMPTS_PATH) + + prompt = Prompt(self.task.get_task()) + + selected_files = self.file_selector.get_from_yaml().included_files + + files = Files(self.project_path, selected_files) + + improve_lambda = lambda: improve_fn( + self.ai, prompt, files, memory, preprompts_holder + ) + + print("\n---- begining code generation ----\n") + updated_files_dictionary = handle_improve_mode(improve_lambda, memory) + print("\n---- ending code generation ----\n") + + files.write_to_disk(updated_files_dictionary) + + def run(self): + + self.task.open_task_in_editor() + input("Please edit the task file and then press Enter to continue...") + + update_user_file_selection(self.file_selector) + + implement = self._confirm__task_with_user() + + while not implement: + adjust_prompt_files() + implement = self._confirm__task_with_user() + + check_for_unstaged_changes(self.repository) + + self._run_improve_mode() diff --git a/gpt_engineer/applications/feature_cli/feature.py b/gpt_engineer/applications/feature_cli/feature.py index f8370bfbb3..b2c24b6af9 100644 --- a/gpt_engineer/applications/feature_cli/feature.py +++ b/gpt_engineer/applications/feature_cli/feature.py @@ -57,7 +57,10 @@ def get_description(self) -> str: str The content of the feature file. """ - return super().__getitem__(self._feature_filename) + if super().__contains__(self._feature_filename): + return super().__getitem__(self._feature_filename) + + return None def set_description(self, feature_description: str): """ @@ -92,9 +95,10 @@ def get_progress(self) -> dict: The content of the feature file. """ - json_string = super().__getitem__(self._progress_filename) - if json_string: - return json.loads(json_string) + if super().__contains__(self._progress_filename): + json_string = super().__getitem__(self._progress_filename) + if json_string: + return json.loads(json_string) return None @@ -135,7 +139,10 @@ def get_task(self) -> str: str The content of the feature file. """ - return super().__getitem__(self._task_filename) + if super().__contains__(self._task_filename): + return super().__getitem__(self._task_filename) + + return None def has_task(self) -> bool: """ diff --git a/gpt_engineer/applications/feature_cli/repository.py b/gpt_engineer/applications/feature_cli/repository.py index 7c5b4e798e..f6d7d9c054 100644 --- a/gpt_engineer/applications/feature_cli/repository.py +++ b/gpt_engineer/applications/feature_cli/repository.py @@ -44,26 +44,11 @@ def __init__(self, repo_path: str): def get_tracked_files(self) -> List[str]: """ - List all files that are currently tracked by Git in the repository, - ignoring submodules. + List all files that are currently tracked by Git in the repository. 
""" try: - # Get all tracked files tracked_files = self.repo.git.ls_files().split("\n") - - # Get the list of submodule paths - submodule_paths = self.repo.git.submodule( - "foreach", "--quiet", "echo $sm_path" - ).split("\n") - - # Filter out submodule paths from tracked files - filtered_files = [ - file - for file in tracked_files - if not any(file.startswith(submodule) for submodule in submodule_paths) - ] - - return filtered_files + return tracked_files except GitCommandError as e: print(f"Error listing tracked files: {e}") return [] diff --git a/gpt_engineer/applications/feature_cli/task.py b/gpt_engineer/applications/feature_cli/task.py new file mode 100644 index 0000000000..41b422212b --- /dev/null +++ b/gpt_engineer/applications/feature_cli/task.py @@ -0,0 +1,75 @@ +import os +import platform +import subprocess +import shutil +from pathlib import Path +from typing import Union + +from gpt_engineer.core.default.disk_memory import DiskMemory +from gpt_engineer.core.default.paths import memory_path + + +class Task(DiskMemory): + """ + Represents a task that will be done one off without the wider context of a feature + """ + + def __init__(self, project_path: Union[str, Path]): + + self._task_path = Path(memory_path(project_path)) / "task" + self.path = self._task_path + self._task_filename = "task.md" + self._files_filename = "files.yml" + + if not os.path.exists(self._task_path): + os.makedirs(self._task_path) + + self.set_task("Please replace with task description") + + super().__init__(self._task_path) + + def delete(self): + shutil.rmtree(self._task_path) + + def set_task(self, task: str): + """ + Updates the task file with new text. + Parameters + ---------- + task : str + The new task to write to the feature file. + """ + super().__setitem__(self._task_filename, task) + + def get_task(self) -> str: + """ + Retrieve the content of the task file in the database. + Returns + ------- + str + The content of the feature file. + """ + return super().__getitem__(self._task_filename) + + def _file_path(self, filename): + return self._task_path / filename + + def _open_file_in_editor(self, path): + """ + Opens the generated YAML file in the default system editor. + If the YAML file is empty or doesn't exist, generate it first. + """ + + # Platform-specific methods to open the file + if platform.system() == "Windows": + os.startfile(path) + elif platform.system() == "Darwin": + subprocess.run(["open", path]) + else: # Linux and other Unix-like systems + subprocess.run(["xdg-open", path]) + + def open_task_in_editor(self): + """ + Opens the task file in the default system editor. + """ + self._open_file_in_editor(self._file_path(self._task_filename))