Added MCP support

This commit is contained in:
Pavel Pivovarov
2025-12-15 15:15:40 +11:00
parent 272d223f73
commit 25e263b7a6
9 changed files with 635 additions and 201 deletions

81
main.go
View File

@@ -4,13 +4,16 @@ import (
"bufio"
"context"
"fmt"
"log"
"os"
"os/signal"
"strings"
"syscall"
"git.netra.pivpav.com/public/tell-me/config"
"git.netra.pivpav.com/public/tell-me/llm"
"tell-me/config"
"tell-me/llm"
"tell-me/mcp"
"github.com/sashabaranov/go-openai"
)
@@ -19,29 +22,42 @@ func main() {
cfg, err := config.Load()
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading configuration: %v\n", err)
fmt.Fprintf(os.Stderr, "Please create ~/.config/tell-me.ini from tell-me.ini.example\n")
fmt.Fprintf(os.Stderr, "Please create ~/.config/tell-me.yaml from tell-me.yaml.example\n")
os.Exit(1)
}
// Create LLM client
ctx := context.Background()
// Initialize MCP manager
mcpManager := mcp.NewManager(ctx)
defer mcpManager.Close()
// Connect to MCP servers if configured
if len(cfg.MCPServers) > 0 {
fmt.Println("Connecting to MCP servers...")
if err := mcpManager.ConnectServers(cfg.MCPServers); err != nil {
log.Printf("Warning: Failed to connect to some MCP servers: %v", err)
}
}
// Create LLM client with MCP manager
client := llm.NewClient(
cfg.LLM.APIURL,
cfg.LLM.APIKey,
cfg.LLM.Model,
cfg.LLM.ContextSize,
cfg.SearXNG.URL,
cfg.APIURL,
cfg.APIKey,
cfg.Model,
cfg.ContextSize,
cfg.SearXNGURL,
mcpManager,
)
// Initialize conversation with system prompt
// Initialize conversation with system prompt from config
messages := []openai.ChatCompletionMessage{
{
Role: openai.ChatMessageRoleSystem,
Content: llm.GetSystemPrompt(),
Content: llm.GetSystemPrompt(cfg.Prompt),
},
}
ctx := context.Background()
// Check if arguments are provided (non-interactive mode)
if len(os.Args) > 1 {
query := strings.Join(os.Args[1:], " ")
@@ -58,14 +74,21 @@ func main() {
os.Exit(0)
}()
// Print welcome message
// Print welcome message with MCP status
fmt.Println("╔════════════════════════════════════════════════════════════════╗")
fmt.Println("║ Tell-Me CLI ║")
fmt.Println("║ AI-powered search with local LLM support ║")
fmt.Println("╚════════════════════════════════════════════════════════════════╝")
fmt.Println()
fmt.Printf("Using model: %s\n", cfg.LLM.Model)
fmt.Printf("SearXNG: %s\n", cfg.SearXNG.URL)
fmt.Printf("Using model: %s\n", cfg.Model)
fmt.Printf("SearXNG: %s\n", cfg.SearXNGURL)
// Display MCP server status
if len(cfg.MCPServers) > 0 {
fmt.Println()
displayMCPStatusInline(mcpManager)
}
fmt.Println()
fmt.Println("Type your questions below. Type 'exit' or 'quit' to exit, or press Ctrl-C.")
fmt.Println("────────────────────────────────────────────────────────────────")
@@ -131,3 +154,29 @@ func processQuery(ctx context.Context, client *llm.Client, messages []openai.Cha
return messages
}
// displayMCPStatusInline shows MCP server status in the header.
// For each configured server it prints a colored marker (red ✗ on
// connection error, green ✓ otherwise), the server name, and — for
// healthy servers — the number of tools it exposes.
func displayMCPStatusInline(manager *mcp.Manager) {
	statuses := manager.GetDetailedStatus()
	if len(statuses) == 0 {
		return
	}

	// Build each server's display entry, then join them on one line.
	entries := make([]string, 0, len(statuses))
	for _, s := range statuses {
		if s.Error != "" {
			// Red X for a server that reported an error.
			entries = append(entries, fmt.Sprintf("\033[31m✗\033[0m %s", s.Name))
			continue
		}
		// Green checkmark for a healthy server, with its tool count.
		entries = append(entries, fmt.Sprintf("\033[32m✓\033[0m %s (%d tools)", s.Name, len(s.Tools)))
	}
	fmt.Printf("MCP Servers: %s\n", strings.Join(entries, ", "))
}