// Package llm wraps an OpenAI-compatible chat client with built-in
// web-search/article-fetch tools and optional MCP tool support.
package llm
|
|
|
|
import (
|
|
"context"
|
|
"encoding/json"
|
|
"fmt"
|
|
"strings"
|
|
"time"
|
|
|
|
"tell-me/mcp"
|
|
"tell-me/tools"
|
|
|
|
"github.com/sashabaranov/go-openai"
|
|
)
|
|
|
|
// Client wraps the OpenAI client for LLM interactions
type Client struct {
	client      *openai.Client // underlying OpenAI-compatible API client
	model       string         // model name sent with every completion request
	contextSize int            // configured context window size (stored but not referenced in this file)
	searxngURL  string         // base URL of the SearXNG instance used by the web_search tool
	mcpManager  *mcp.Manager   // optional MCP tool manager; may be nil (MCP tools disabled)
}
// NewClient creates a new LLM client
|
|
func NewClient(apiURL, apiKey, model string, contextSize int, searxngURL string, mcpManager *mcp.Manager) *Client {
|
|
config := openai.DefaultConfig(apiKey)
|
|
config.BaseURL = apiURL
|
|
|
|
client := openai.NewClientWithConfig(config)
|
|
|
|
return &Client{
|
|
client: client,
|
|
model: model,
|
|
contextSize: contextSize,
|
|
searxngURL: searxngURL,
|
|
mcpManager: mcpManager,
|
|
}
|
|
}
|
|
|
|
// GetSystemPrompt returns the system prompt with the current date (YYYY-MM-DD)
// appended, so the model knows "today" without a tool call.
func GetSystemPrompt(prompt string) string {
	today := time.Now().Format("2006-01-02")
	return prompt + "\n\nCURRENT DATE: " + today
}
// GetBuiltInTools returns the tool definitions for the LLM (built-in tools only).
// Two tools are exposed: "web_search" (a SearXNG query) and "fetch_articles"
// (a batch URL fetch); each Parameters field is an inline JSON Schema document.
func GetBuiltInTools() []openai.Tool {
	return []openai.Tool{
		{
			Type: openai.ToolTypeFunction,
			Function: &openai.FunctionDefinition{
				Name:        "web_search",
				Description: "Search the internet for information using SearXNG. Use this tool to find current information, facts, news, or any knowledge you need to answer the user's question.",
				Parameters: json.RawMessage(`{
					"type": "object",
					"properties": {
						"query": {
							"type": "string",
							"description": "The search query to find relevant information"
						}
					},
					"required": ["query"]
				}`),
			},
		},
		{
			Type: openai.ToolTypeFunction,
			Function: &openai.FunctionDefinition{
				Name:        "fetch_articles",
				Description: "Fetch and read content from 1-5 articles at once. Provide both titles and URLs from search results. The HTML will be converted to clean text format and combined. Use this after searching to read the most relevant pages together.",
				Parameters: json.RawMessage(`{
					"type": "object",
					"properties": {
						"articles": {
							"type": "array",
							"items": {
								"type": "object",
								"properties": {
									"title": {
										"type": "string",
										"description": "The title of the article from search results"
									},
									"url": {
										"type": "string",
										"description": "The URL to fetch (must start with http:// or https://)"
									}
								},
								"required": ["title", "url"]
							},
							"description": "Array of articles with titles and URLs (1-5 recommended, max 5)"
						}
					},
					"required": ["articles"]
				}`),
			},
		},
	}
}
// GetTools returns all available tools (built-in + MCP tools)
|
|
func (c *Client) GetTools() []openai.Tool {
|
|
tools := GetBuiltInTools()
|
|
|
|
// Add MCP tools if manager is available
|
|
if c.mcpManager != nil {
|
|
mcpTools := c.mcpManager.GetAllTools()
|
|
tools = append(tools, mcpTools...)
|
|
}
|
|
|
|
return tools
|
|
}
|
|
|
|
// Chat sends a message and handles tool calls
|
|
func (c *Client) Chat(ctx context.Context, messages []openai.ChatCompletionMessage) (string, []openai.ChatCompletionMessage, error) {
|
|
req := openai.ChatCompletionRequest{
|
|
Model: c.model,
|
|
Messages: messages,
|
|
Tools: c.GetTools(),
|
|
}
|
|
|
|
resp, err := c.client.CreateChatCompletion(ctx, req)
|
|
if err != nil {
|
|
return "", messages, fmt.Errorf("chat completion failed: %w", err)
|
|
}
|
|
|
|
choice := resp.Choices[0]
|
|
messages = append(messages, choice.Message)
|
|
|
|
// Handle tool calls
|
|
if len(choice.Message.ToolCalls) > 0 {
|
|
for _, toolCall := range choice.Message.ToolCalls {
|
|
result := c.handleToolCall(ctx, toolCall)
|
|
|
|
// Add tool response to messages
|
|
messages = append(messages, openai.ChatCompletionMessage{
|
|
Role: openai.ChatMessageRoleTool,
|
|
Content: result,
|
|
ToolCallID: toolCall.ID,
|
|
})
|
|
}
|
|
|
|
// Make another call with tool results
|
|
return c.Chat(ctx, messages)
|
|
}
|
|
|
|
return choice.Message.Content, messages, nil
|
|
}
|
|
|
|
// handleToolCall routes tool calls to the appropriate handler
|
|
func (c *Client) handleToolCall(ctx context.Context, toolCall openai.ToolCall) string {
|
|
toolName := toolCall.Function.Name
|
|
|
|
// Check if it's a built-in tool
|
|
switch toolName {
|
|
case "web_search":
|
|
var args struct {
|
|
Query string `json:"query"`
|
|
}
|
|
if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
|
|
return fmt.Sprintf("Error parsing arguments: %v", err)
|
|
}
|
|
fmt.Printf("Searching: %s\n", args.Query)
|
|
result, err := tools.WebSearch(c.searxngURL, args.Query)
|
|
if err != nil {
|
|
return fmt.Sprintf("Search error: %v", err)
|
|
}
|
|
return result
|
|
|
|
case "fetch_articles":
|
|
var args struct {
|
|
Articles []struct {
|
|
Title string `json:"title"`
|
|
URL string `json:"url"`
|
|
} `json:"articles"`
|
|
}
|
|
if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
|
|
return fmt.Sprintf("Error parsing arguments: %v", err)
|
|
}
|
|
fmt.Printf("Reading %d articles:\n", len(args.Articles))
|
|
urls := make([]string, len(args.Articles))
|
|
for i, article := range args.Articles {
|
|
fmt.Printf(" - %s\n", article.Title)
|
|
urls[i] = article.URL
|
|
}
|
|
result, err := tools.FetchArticles(urls)
|
|
if err != nil {
|
|
return fmt.Sprintf("Fetch error: %v", err)
|
|
}
|
|
return result
|
|
|
|
default:
|
|
// Check if it's an MCP tool (format: servername_toolname)
|
|
if c.mcpManager != nil && strings.Contains(toolName, "_") {
|
|
fmt.Printf("Calling MCP tool: %s\n", toolName)
|
|
result, err := c.mcpManager.CallTool(ctx, toolName, toolCall.Function.Arguments)
|
|
if err != nil {
|
|
return fmt.Sprintf("MCP tool error: %v", err)
|
|
}
|
|
return result
|
|
}
|
|
|
|
return fmt.Sprintf("Unknown tool: %s", toolName)
|
|
}
|
|
}
|