package agent
|
|
|
|
import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"time"
	"unicode/utf8"

	"github.com/openai/openai-go"
	oaioption "github.com/openai/openai-go/option"

	"llm-agent/prd"
)
|
|
|
|
// Tuning knobs for the agent loop.
const (
	// baseURL is the OpenAI-compatible chat endpoint on localhost.
	// NOTE(review): port 12434 looks like Docker Model Runner / a local
	// Ollama-style gateway — confirm before deploying elsewhere.
	baseURL = "http://127.0.0.1:12434/v1"
	// maxRetries is the number of attempts per task before it is reported failed.
	maxRetries = 3
	// maxTurns caps the model/tool round-trips within a single task.
	maxTurns = 10
)
|
|
|
|
// systemPrompt is the system instruction (in German) sent at the start of
// every task conversation. It is runtime-visible model input — do not
// translate or reformat. It tells the model to finish the task with the
// provided tools, call task_complete when done, and use relative paths only.
var systemPrompt = `Du bist ein autonomer Coding-Agent.
Erledige den gegebenen Task vollständig mit den bereitgestellten Tools.
Rufe task_complete auf sobald der Task erledigt ist.
Nutze ausschließlich relative Pfade.`
|
|
|
|
// AgentLoop runs PRD tasks one by one against an LLM, executing the tool
// calls the model requests until each task reports completion.
type AgentLoop struct {
	client  *openai.Client // chat client for the local OpenAI-compatible endpoint
	model   string         // model identifier sent with every request
	workDir string         // working directory handed to the tool executor
	prdFile string         // PRD file path: tasks are parsed from and marked complete in it
	log     *Logger        // logger; verbose flag controls Debug output
}
|
|
|
|
func NewAgentLoop(model, workDir, prdFile string, verbose bool) *AgentLoop {
|
|
client := openai.NewClient(
|
|
oaioption.WithBaseURL(baseURL),
|
|
oaioption.WithAPIKey("ollama"),
|
|
)
|
|
return &AgentLoop{
|
|
client: &client,
|
|
model: model,
|
|
workDir: workDir,
|
|
prdFile: prdFile,
|
|
log: NewLogger(verbose),
|
|
}
|
|
}
|
|
|
|
func (a *AgentLoop) Run() error {
|
|
tasks, err := prd.ParseTasks(a.prdFile)
|
|
if err != nil {
|
|
return fmt.Errorf("PRD lesen fehlgeschlagen: %w", err)
|
|
}
|
|
|
|
pending := 0
|
|
for _, t := range tasks {
|
|
if !t.Completed {
|
|
pending++
|
|
}
|
|
}
|
|
a.log.Info("📋 %d Tasks gefunden, %d offen", len(tasks), pending)
|
|
|
|
for _, task := range tasks {
|
|
if task.Completed {
|
|
a.log.Info("✅ Überspringe (bereits erledigt): %s", task.Title)
|
|
continue
|
|
}
|
|
|
|
a.log.TaskStart(task.Title)
|
|
|
|
success := false
|
|
var lastErr error
|
|
for attempt := 1; attempt <= maxRetries; attempt++ {
|
|
if attempt > 1 {
|
|
a.log.Info("🔁 Retry %d/%d...", attempt, maxRetries)
|
|
time.Sleep(time.Duration(attempt) * 2 * time.Second)
|
|
}
|
|
|
|
if err := a.runTask(task); err == nil {
|
|
success = true
|
|
break
|
|
} else {
|
|
lastErr = err
|
|
a.log.Info("⚠️ Fehler: %v", err)
|
|
}
|
|
}
|
|
|
|
if success {
|
|
prd.MarkTaskComplete(a.prdFile, task.Title)
|
|
a.log.TaskDone(task.Title)
|
|
} else {
|
|
a.log.TaskFailed(task.Title, maxRetries)
|
|
_ = lastErr
|
|
}
|
|
}
|
|
|
|
a.log.Info("\n🎉 Alle Tasks abgearbeitet!")
|
|
return nil
|
|
}
|
|
|
|
func (a *AgentLoop) runTask(task prd.Task) error {
|
|
executor := NewToolExecutor(a.workDir)
|
|
|
|
messages := []openai.ChatCompletionMessageParamUnion{
|
|
openai.SystemMessage(systemPrompt),
|
|
openai.UserMessage(fmt.Sprintf(
|
|
"Task: %s\nArbeitsverzeichnis: %s",
|
|
task.Title, a.workDir,
|
|
)),
|
|
}
|
|
|
|
a.log.ChatMessage("system", systemPrompt)
|
|
a.log.ChatMessage("user", fmt.Sprintf(
|
|
"Task: %s\nArbeitsverzeichnis: %s",
|
|
task.Title, a.workDir,
|
|
))
|
|
|
|
for turn := 0; turn < maxTurns; turn++ {
|
|
a.log.Turn(turn + 1)
|
|
|
|
totalChars := 0
|
|
for _, m := range messages {
|
|
totalChars += len(fmt.Sprintf("%v", m))
|
|
}
|
|
|
|
start := time.Now()
|
|
a.log.Debug("MODEL REQUEST: model=%s ~%d Zeichen\n%s",
|
|
a.model, totalChars, formatMessages(messages))
|
|
|
|
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
|
|
resp, err := a.client.Chat.Completions.New(
|
|
ctx,
|
|
openai.ChatCompletionNewParams{
|
|
Model: a.model,
|
|
Messages: messages,
|
|
Tools: Tools,
|
|
},
|
|
)
|
|
cancel()
|
|
|
|
elapsed := time.Since(start)
|
|
if resp != nil && len(resp.Choices) > 0 {
|
|
a.log.Debug("MODEL RESPONSE\n%s", formatResponse(resp, elapsed))
|
|
}
|
|
if err != nil {
|
|
return fmt.Errorf("API-Fehler (~%d Zeichen): %w", totalChars, err)
|
|
}
|
|
|
|
choice := resp.Choices[0]
|
|
messages = append(messages, choice.Message.ToParam())
|
|
|
|
// Echte Tool-Calls vom SDK
|
|
toolCalls := choice.Message.ToolCalls
|
|
|
|
// Fallback: XML-Format parsen wenn Modell kein natives Tool Calling nutzt
|
|
if len(toolCalls) == 0 && strings.Contains(choice.Message.Content, "<function=") {
|
|
a.log.Debug("XML-Fallback: Parse Tool-Calls aus Content")
|
|
toolCalls = parseXMLToolCalls(choice.Message.Content)
|
|
}
|
|
|
|
// Kein Tool-Call → LLM hat nur Text geantwortet
|
|
if len(toolCalls) == 0 {
|
|
a.log.ChatMessage("assistant", choice.Message.Content)
|
|
nudge := "Nutze die bereitgestellten Tools. Rufe task_complete auf wenn du fertig bist."
|
|
a.log.ChatMessage("user", nudge)
|
|
messages = append(messages, openai.UserMessage(nudge))
|
|
continue
|
|
}
|
|
|
|
// Tool-Calls ausführen
|
|
for _, toolCall := range toolCalls {
|
|
a.log.Info(" 🔧 %s(%s)",
|
|
toolCall.Function.Name,
|
|
truncate(toolCall.Function.Arguments, 80),
|
|
)
|
|
|
|
result, done := executor.Execute(toolCall)
|
|
a.log.ChatMessage("tool",
|
|
fmt.Sprintf("%s → %s", toolCall.Function.Name, result))
|
|
|
|
// Tool-Ergebnis zurück ans LLM
|
|
messages = append(messages, openai.ToolMessage(result, toolCall.ID))
|
|
|
|
if done {
|
|
return nil // task_complete → Erfolg
|
|
}
|
|
}
|
|
}
|
|
|
|
return fmt.Errorf("maximale Turns (%d) erreicht", maxTurns)
|
|
}
|
|
|
|
// ─── XML Fallback Parser ──────────────────────────────────
|
|
|
|
// parseXMLToolCalls parst Tool-Calls im XML-Format das manche Modelle nutzen:
|
|
//
|
|
// <function=write_file>
|
|
// <parameter=path>hello.go</parameter>
|
|
// <parameter=content>package main...</parameter>
|
|
// </function>
|
|
func parseXMLToolCalls(content string) []openai.ChatCompletionMessageToolCall {
|
|
var calls []openai.ChatCompletionMessageToolCall
|
|
remaining := content
|
|
callID := 0
|
|
|
|
for {
|
|
// Funktionsname extrahieren
|
|
start := strings.Index(remaining, "<function=")
|
|
if start == -1 {
|
|
break
|
|
}
|
|
nameStart := start + len("<function=")
|
|
nameEnd := strings.Index(remaining[nameStart:], ">")
|
|
if nameEnd == -1 {
|
|
break
|
|
}
|
|
funcName := strings.TrimSpace(remaining[nameStart : nameStart+nameEnd])
|
|
|
|
// Block bis </function> extrahieren
|
|
blockEnd := strings.Index(remaining, "</function>")
|
|
if blockEnd == -1 {
|
|
break
|
|
}
|
|
block := remaining[start : blockEnd+len("</function>")]
|
|
|
|
// Parameter extrahieren und als JSON serialisieren
|
|
params := extractXMLParams(block)
|
|
argsJSON, err := json.Marshal(params)
|
|
if err != nil {
|
|
remaining = remaining[blockEnd+len("</function>"):]
|
|
continue
|
|
}
|
|
|
|
callID++
|
|
calls = append(calls, openai.ChatCompletionMessageToolCall{
|
|
ID: fmt.Sprintf("xml-call-%d", callID),
|
|
Type: "function",
|
|
Function: openai.ChatCompletionMessageToolCallFunction{
|
|
Name: funcName,
|
|
Arguments: string(argsJSON),
|
|
},
|
|
})
|
|
|
|
remaining = remaining[blockEnd+len("</function>"):]
|
|
}
|
|
|
|
return calls
|
|
}
|
|
|
|
// extractXMLParams collects every <parameter=key>value</parameter> pair
// from a function block into a map. Keys and values are whitespace-trimmed;
// scanning stops at the first malformed (unterminated) parameter.
func extractXMLParams(block string) map[string]string {
	params := make(map[string]string)
	rest := block

	for {
		_, afterOpen, found := strings.Cut(rest, "<parameter=")
		if !found {
			break
		}
		key, afterKey, found := strings.Cut(afterOpen, ">")
		if !found {
			break
		}
		value, afterValue, found := strings.Cut(afterKey, "</parameter>")
		if !found {
			break
		}
		params[strings.TrimSpace(key)] = strings.TrimSpace(value)
		rest = afterValue
	}

	return params
}
|
|
|
|
// ─── Hilfsfunktionen ─────────────────────────────────────
|
|
|
|
// truncate shortens s to at most max bytes and appends "..." when it had
// to cut. The cut is moved back to a UTF-8 rune boundary so multi-byte
// characters (umlauts, emoji in log/argument text) are never split into
// invalid UTF-8 — the original sliced raw bytes. A max <= 0 yields "..."
// for non-empty s instead of panicking on negative max.
func truncate(s string, max int) string {
	if len(s) <= max {
		return s
	}
	cut := max
	if cut < 0 {
		cut = 0
	}
	// Back up until the byte at the cut position starts a rune.
	for cut > 0 && !utf8.RuneStart(s[cut]) {
		cut--
	}
	return s[:cut] + "..."
}
|
|
|
|
func formatMessages(messages []openai.ChatCompletionMessageParamUnion) string {
|
|
var sb strings.Builder
|
|
for i, m := range messages {
|
|
var role, content string
|
|
switch {
|
|
case m.OfSystem != nil:
|
|
role = "system"
|
|
content = m.OfSystem.Content.OfString.Value
|
|
case m.OfUser != nil:
|
|
role = "user"
|
|
content = m.OfUser.Content.OfString.Value
|
|
case m.OfAssistant != nil:
|
|
role = "assistant"
|
|
content = m.OfAssistant.Content.OfString.Value
|
|
default:
|
|
role = "other"
|
|
}
|
|
preview := strings.ReplaceAll(truncate(content, 120), "\n", "↵")
|
|
sb.WriteString(fmt.Sprintf(" [%d] %-10s : %s\n", i, role, preview))
|
|
}
|
|
return sb.String()
|
|
}
|
|
|
|
// formatResponse renders a chat-completion response as a multi-line,
// human-readable debug summary: ID, model, wall-clock time, finish reason,
// token usage, and then either the tool calls (names with truncated
// arguments) or the plain text content, indented line by line.
//
// NOTE(review): resp.Choices[0] is accessed without a length check — the
// only visible caller (runTask) invokes this after verifying
// len(resp.Choices) > 0; confirm that precondition before reusing elsewhere.
func formatResponse(resp *openai.ChatCompletion, elapsed time.Duration) string {
	var sb strings.Builder
	sb.WriteString(fmt.Sprintf(" ID : %s\n", resp.ID))
	sb.WriteString(fmt.Sprintf(" Modell : %s\n", resp.Model))
	sb.WriteString(fmt.Sprintf(" Elapsed : %s\n", elapsed.Round(time.Millisecond)))
	sb.WriteString(fmt.Sprintf(" Finish-Reason : %s\n", resp.Choices[0].FinishReason))
	sb.WriteString(fmt.Sprintf(" Tokens : prompt=%d completion=%d total=%d\n",
		resp.Usage.PromptTokens,
		resp.Usage.CompletionTokens,
		resp.Usage.TotalTokens,
	))

	// Prefer listing tool calls; fall back to the text content if any.
	if len(resp.Choices[0].Message.ToolCalls) > 0 {
		sb.WriteString(" Tool-Calls :\n")
		for _, tc := range resp.Choices[0].Message.ToolCalls {
			sb.WriteString(fmt.Sprintf(" → %s(%s)\n",
				tc.Function.Name,
				truncate(tc.Function.Arguments, 100),
			))
		}
	} else if content := resp.Choices[0].Message.Content; content != "" {
		sb.WriteString(" Content :\n")
		for _, line := range strings.Split(content, "\n") {
			sb.WriteString(fmt.Sprintf(" %s\n", line))
		}
	}

	return sb.String()
}
|