"""Driver script: ask a DeepSeek chat model to scaffold a Maven project via tool calls."""
import time
import json

from openai import OpenAI

import chat_tools
from bot_util import get_api_details

# Model and retry configuration.
MODEL = "deepseek-chat"
MAX_RETRIES = 3
RETRY_DELAY = 2  # seconds
count = 0  # module-level counter; chat_with_deepseek tracks its own attempts

# Tool definitions exposed to the model.
# NOTE(review): this rebinds the name `chat_tools` from the imported module to
# the object returned by get_chat_tools(); later code relies on that object
# exposing .get_tools() and .util — confirm that is intentional.
chat_tools = chat_tools.get_chat_tools()

# Shared conversation history (list of message dicts) mutated by chat_with_deepseek.
history = []

# Initialize the OpenAI-compatible client.
# Fetch API details once instead of calling get_api_details() twice.
_api_details = get_api_details()
client = OpenAI(
    api_key=_api_details.api_key,
    base_url=_api_details.api_url,
)

system_prompt = "You are a helpful assistant that good at writing programs. you are able to generate project structures and write code. you can only help user when you know enought information about the project. For example, if it is a maven project, you need to know the group id, artifact id, main class name, main method name and the dependencies. If the user does not provide enough information, you should ask the user for more details."

# Read the sample requirement text in one call rather than a line-by-line
# `content += line` loop; specify the encoding explicitly.
with open("test_data/k8s_day10.txt", "r", encoding="utf-8") as f:
    content = f.read()
|
def chat_with_deepseek(max_retries: int = MAX_RETRIES, retry_delay: int = RETRY_DELAY):
    """Run one tool-enabled chat exchange with the model and return its reply.

    Seeds the shared `history` with the system prompt and a fixed
    project-construction request, then requests a completion. If the model
    answers with a `write_file_batch` tool call, the files are written to
    disk and a follow-up completion is requested with the tool result
    appended to the history.

    Args:
        max_retries: number of attempts before giving up.
        retry_delay: seconds to sleep between failed attempts.

    Returns:
        The assistant's final message content (str), or None if the model
        issued a tool call other than `write_file_batch`.

    Raises:
        Exception: re-raises the last API error once retries are exhausted.

    NOTE(review): each call appends another system/user pair to the shared
    module-level `history`; callers are expected to invoke this once per
    conversation.
    """
    history.append({"role": "system", "content": system_prompt})
    history.append({"role": "user", "content": f"please consturct project with below requirements: \n \
I need a maven project with group id dev.kualu and artifact id demo. \
The dependency is spring-boot-starter-web. The main class is com.kualu.demo.DemoApplication. \
The main method should be in the DemoApplication class. Please generate the project foles along with pom.xml. \
Please write these files to local disk."})

    for attempt in range(1, max_retries + 1):
        try:
            response = client.chat.completions.create(
                model=MODEL,
                messages=history,
                tools=chat_tools.get_tools(),
            )

            message = response.choices[0].message
            if not message.tool_calls:
                # Plain text answer — no tool call to service.
                return message.content

            tool_call = message.tool_calls[0]
            if tool_call.function.name != "write_file_batch":
                # Unrecognized tool call: nothing to do (matches the original
                # fall-through, which returned None).
                return None

            params = json.loads(tool_call.function.arguments)
            # Use locals here; the original declared `global content` and
            # accidentally clobbered the module-level file content with the
            # tool arguments.
            file_paths = params["file_paths"]
            file_contents = params["contents"]
            result = chat_tools.util.write_file_batch(file_paths, file_contents)

            # BUG FIX: the assistant message that requested the tool call must
            # precede the role="tool" result, or OpenAI-compatible APIs reject
            # the follow-up request (previously this append was commented out).
            history.append(message)
            history.append({"role": "tool", "tool_call_id": tool_call.id, "content": result})

            # Ask the model to summarize/continue now that the tool has run.
            response = client.chat.completions.create(
                model=MODEL,
                messages=history,
            )
            return response.choices[0].message.content
        except Exception as e:
            print(f"An error occurred: {e}.")
            if attempt < max_retries:
                print(f"Retrying in {retry_delay} seconds...")
                time.sleep(retry_delay)
            else:
                print("Max retries reached. Exiting.")
                raise
if __name__ == "__main__":
    # Run a single attempt (no retries) when executed as a script.
    print(chat_with_deepseek(max_retries=1))