src/chat_manager.py deleted
This commit is contained in:
parent 9945139e9b
commit ab0ac48400
@@ -1,281 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Chat management module for the codeeditor application."""

import logging
import os
import re
from typing import Dict, List, Optional, Tuple

import anthropic
import openai
from dotenv import load_dotenv

from codeeditor.system_prompter import SystemPrompter

# Load environment variables from .env file
load_dotenv()

logger = logging.getLogger(__name__)

# Set up API keys
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
AI_PROVIDER = os.getenv("AI_PROVIDER", "anthropic").lower()  # Default to Anthropic


class ChatManager:
    """Class for managing chat interactions with the AI assistant."""

    def __init__(self):
        """Initialize the ChatManager."""
        self.system_prompter = SystemPrompter()
        self.chat_history = []
        self.current_file_path = None
        self.current_file_content = None

        # Initialize the appropriate client based on the provider
        if AI_PROVIDER == "openai" and OPENAI_API_KEY:
            self.provider = "openai"
            self.model = os.getenv("OPENAI_MODEL", "gpt-4")
            openai.api_key = OPENAI_API_KEY
            logger.info("ChatManager initialized with OpenAI")
        elif AI_PROVIDER == "anthropic" and ANTHROPIC_API_KEY:
            self.provider = "anthropic"
            self.model = os.getenv("ANTHROPIC_MODEL", "claude-3-opus-20240229")
            self.client = anthropic.Anthropic(api_key=ANTHROPIC_API_KEY)
            logger.info("ChatManager initialized with Anthropic")
        else:
            self.provider = "none"
            logger.warning(
                "No valid AI provider configured. Please set AI_PROVIDER and corresponding API key."
            )

    def send_message(
        self, user_message: str, file_context: Optional[str] = None
    ) -> str:
        """Send a message to the AI assistant and get a response.

        Args:
            user_message: The user's message to send to the AI.
            file_context: Optional context from the current file.

        Returns:
            The AI's response as a string.
        """
        try:
            # Parse file context if provided
            if file_context:
                (
                    self.current_file_path,
                    self.current_file_content,
                ) = self._parse_file_context(file_context)

            # Check for special commands
            if user_message.startswith("/"):
                return self._handle_special_command(user_message)

            # Generate system prompt with file context if provided
            system_prompt = self.system_prompter.generate_prompt(file_context)

            if self.provider == "openai":
                return self._send_openai_message(system_prompt, user_message)
            elif self.provider == "anthropic":
                return self._send_anthropic_message(system_prompt, user_message)
            else:
                error_msg = (
                    "No valid AI provider configured. Please check your .env file."
                )
                logger.error(error_msg)
                return f"Error: {error_msg}"

        except Exception as e:
            error_msg = f"Error communicating with AI assistant: {e}"
            logger.error(error_msg)
            return f"Error: {error_msg}"

    def _parse_file_context(self, file_context: str) -> Tuple[str, str]:
        """Parse file context to extract file path and content.

        Args:
            file_context: File context string.

        Returns:
            Tuple of (file_path, file_content).
        """
        lines = file_context.split("\n", 1)
        file_path = lines[0].replace("File: ", "").strip()
        file_content = lines[1] if len(lines) > 1 else ""

        return file_path, file_content

    def _handle_special_command(self, command: str) -> str:
        """Handle special commands starting with /.

        Args:
            command: The command string.

        Returns:
            Response to the command.
        """
        command = command.strip().lower()

        # Handle improve code command
        if command.startswith("/improve") and self.current_file_content:
            prompt = self.system_prompter.generate_code_improvement_prompt(
                self.current_file_content, self.current_file_path
            )

            if self.provider == "openai":
                return self._send_openai_message(prompt, "Please improve this code.")
            elif self.provider == "anthropic":
                return self._send_anthropic_message(prompt, "Please improve this code.")

        # Handle explain code command
        elif command.startswith("/explain") and self.current_file_content:
            return self._send_message_with_context(
                "Please explain how this code works in detail.",
                f"File: {self.current_file_path}\n{self.current_file_content}",
            )

        # Handle help command
        elif command.startswith("/help"):
            return (
                "Available commands:\n"
                "- /improve - Suggest improvements for the current file\n"
                "- /explain - Explain how the current file's code works\n"
                "- /help - Show this help message\n"
                "- /clear - Clear the chat history"
            )

        # Handle clear command
        elif command.startswith("/clear"):
            self.clear_history()
            return "Chat history cleared."

        return f"Unknown command: {command}. Type /help for available commands."

    def _send_message_with_context(self, message: str, context: str) -> str:
        """Send a message with specific context.

        Args:
            message: The message to send.
            context: The context to include.

        Returns:
            The AI's response.
        """
        system_prompt = self.system_prompter.generate_prompt(context)

        if self.provider == "openai":
            return self._send_openai_message(system_prompt, message)
        elif self.provider == "anthropic":
            return self._send_anthropic_message(system_prompt, message)
        else:
            return "No AI provider configured."

    def _send_openai_message(self, system_prompt: str, user_message: str) -> str:
        """Send a message using OpenAI API.

        Args:
            system_prompt: The system prompt to guide the AI.
            user_message: The user's message.

        Returns:
            The AI's response as a string.
        """
        # Prepare messages for the API call
        messages = [{"role": "system", "content": system_prompt}]

        # Add chat history
        for msg in self.chat_history:
            messages.append(msg)

        # Add the new user message
        messages.append({"role": "user", "content": user_message})

        # Call the OpenAI API
        response = openai.chat.completions.create(
            model=self.model,
            messages=messages,
            temperature=0.7,
            max_tokens=2000,
        )

        # Extract the assistant's message
        assistant_message = response.choices[0].message.content

        # Update chat history
        self.chat_history.append({"role": "user", "content": user_message})
        self.chat_history.append({"role": "assistant", "content": assistant_message})

        logger.info("Received response from OpenAI assistant")
        return assistant_message

    def _send_anthropic_message(self, system_prompt: str, user_message: str) -> str:
        """Send a message using Anthropic API.

        Args:
            system_prompt: The system prompt to guide the AI.
            user_message: The user's message.

        Returns:
            The AI's response as a string.
        """
        # Prepare messages for the API call
        messages = []

        # Add chat history
        for msg in self.chat_history:
            role = msg["role"]
            content = msg["content"]
            if role == "user":
                messages.append({"role": "user", "content": content})
            elif role == "assistant":
                messages.append({"role": "assistant", "content": content})

        # Add the new user message
        messages.append({"role": "user", "content": user_message})

        # Call the Anthropic API
        response = self.client.messages.create(
            model=self.model,
            system=system_prompt,
            messages=messages,
            max_tokens=2000,
            temperature=0.7,
        )

        # Extract the assistant's message
        assistant_message = response.content[0].text

        # Update chat history
        self.chat_history.append({"role": "user", "content": user_message})
        self.chat_history.append({"role": "assistant", "content": assistant_message})

        logger.info("Received response from Anthropic assistant")
        return assistant_message

    def clear_history(self):
        """Clear the chat history."""
        self.chat_history = []
        logger.info("Chat history cleared")

    def get_history(self) -> List[Dict[str, str]]:
        """Get the chat history.

        Returns:
            The chat history as a list of dictionaries.
        """
        return self.chat_history

    def set_current_file(self, file_path: str, file_content: str):
        """Set the current file being edited.

        Args:
            file_path: Path to the current file.
            file_content: Content of the current file.
        """
        self.current_file_path = file_path
        self.current_file_content = file_content
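
For context on the code removed above, here is a minimal usage sketch. It is not part of the commit: the import path codeeditor.chat_manager is an assumption inferred from the sibling import of codeeditor.system_prompter, and it presumes a .env file providing AI_PROVIDER plus the matching API key, as read at module import time.

    from codeeditor.chat_manager import ChatManager  # assumed module path

    # load_dotenv() runs at import; a configured provider enables real API calls.
    manager = ChatManager()

    # Attach the file being edited so /improve and /explain have something to work on.
    manager.set_current_file("example.py", "def add(a, b):\n    return a + b\n")

    # Messages starting with "/" are handled locally; /help never calls a provider.
    print(manager.send_message("/help"))

    # A plain message is forwarded to the configured provider; file_context must
    # follow the "File: <path>\n<content>" layout expected by _parse_file_context.
    reply = manager.send_message(
        "What does this function do?",
        file_context="File: example.py\ndef add(a, b):\n    return a + b\n",
    )
    print(reply)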