Initial commit: my-brain-importer RAG knowledge management agent
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
157
internal/brain/ask.go
Executable file
157
internal/brain/ask.go
Executable file
@@ -0,0 +1,157 @@
|
||||
// ask.go – Sucht relevante Chunks in Qdrant und beantwortet Fragen mit einem LLM
|
||||
package brain
|
||||
|
||||
import (
	"context"
	"errors"
	"fmt"
	"io"
	"log"
	"strings"

	pb "github.com/qdrant/go-client/qdrant"
	openai "github.com/sashabaranov/go-openai"
	"google.golang.org/grpc/metadata"

	"my-brain-importer/internal/config"
)
|
||||
|
||||
// KnowledgeChunk represents a single search hit returned from Qdrant.
type KnowledgeChunk struct {
	Text   string  // chunk text, read from the "text" payload field
	Score  float32 // similarity score reported by Qdrant (rendered as a percentage by callers)
	Source string  // origin of the chunk, read from the "source" payload field
}
|
||||
|
||||
// Ask sucht relevante Chunks und generiert eine LLM-Antwort per Streaming.
|
||||
func Ask(question string) {
|
||||
ctx := context.Background()
|
||||
ctx = metadata.AppendToOutgoingContext(ctx, "api-key", config.Cfg.Qdrant.APIKey)
|
||||
|
||||
fmt.Printf("🤔 Frage: \"%s\"\n\n", question)
|
||||
|
||||
embClient := config.NewEmbeddingClient()
|
||||
chatClient := config.NewChatClient()
|
||||
|
||||
fmt.Println("🔍 Durchsuche lokale Wissensdatenbank...")
|
||||
chunks := searchKnowledge(ctx, embClient, question)
|
||||
|
||||
if len(chunks) == 0 {
|
||||
fmt.Println("\n❌ Keine relevanten Informationen in der Datenbank gefunden.")
|
||||
fmt.Println(" Füge mehr Daten mit './bin/ingest' hinzu.")
|
||||
return
|
||||
}
|
||||
|
||||
contextText := buildContext(chunks)
|
||||
fmt.Printf("✅ %d relevante Informationen gefunden\n\n", len(chunks))
|
||||
|
||||
systemPrompt := `Du bist ein hilfreicher persönlicher Assistent.
|
||||
Deine Aufgabe ist es, Fragen basierend auf den bereitgestellten Informationen zu beantworten.
|
||||
|
||||
WICHTIGE REGELN:
|
||||
- Antworte nur basierend auf den bereitgestellten Informationen
|
||||
- Wenn die Informationen die Frage nicht beantworten, sage das ehrlich
|
||||
- Antworte auf Deutsch
|
||||
- Sei präzise und direkt
|
||||
- Erfinde keine Informationen hinzu`
|
||||
|
||||
userPrompt := fmt.Sprintf(`Hier sind die relevanten Informationen aus meiner Wissensdatenbank:
|
||||
|
||||
%s
|
||||
|
||||
Basierend auf diesen Informationen, beantworte bitte folgende Frage:
|
||||
%s`, contextText, question)
|
||||
|
||||
fmt.Println("🧠 Generiere Antwort mit lokalem Modell...")
|
||||
fmt.Println(strings.Repeat("═", 80))
|
||||
|
||||
stream, err := chatClient.CreateChatCompletionStream(ctx, openai.ChatCompletionRequest{
|
||||
Model: config.Cfg.Chat.Model,
|
||||
Messages: []openai.ChatCompletionMessage{
|
||||
{Role: openai.ChatMessageRoleSystem, Content: systemPrompt},
|
||||
{Role: openai.ChatMessageRoleUser, Content: userPrompt},
|
||||
},
|
||||
Temperature: 0.7,
|
||||
MaxTokens: 500,
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatalf("❌ LLM Fehler: %v", err)
|
||||
}
|
||||
defer stream.Close()
|
||||
|
||||
fmt.Println("\n💬 Antwort:\n")
|
||||
for {
|
||||
response, err := stream.Recv()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
if len(response.Choices) > 0 {
|
||||
fmt.Print(response.Choices[0].Delta.Content)
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("\n")
|
||||
fmt.Println(strings.Repeat("═", 80))
|
||||
fmt.Println("\n📚 Verwendete Quellen:")
|
||||
for i, chunk := range chunks {
|
||||
preview := chunk.Text
|
||||
if len(preview) > 80 {
|
||||
preview = preview[:80] + "..."
|
||||
}
|
||||
fmt.Printf(" [%d] %.1f%% - %s\n", i+1, chunk.Score*100, preview)
|
||||
}
|
||||
}
|
||||
|
||||
func searchKnowledge(ctx context.Context, embClient *openai.Client, query string) []KnowledgeChunk {
|
||||
embResp, err := embClient.CreateEmbeddings(ctx, openai.EmbeddingRequest{
|
||||
Input: []string{query},
|
||||
Model: openai.EmbeddingModel(config.Cfg.Embedding.Model),
|
||||
})
|
||||
if err != nil {
|
||||
log.Printf("❌ Embedding Fehler: %v", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
conn := config.NewQdrantConn()
|
||||
defer conn.Close()
|
||||
|
||||
searchResult, err := pb.NewPointsClient(conn).Search(ctx, &pb.SearchPoints{
|
||||
CollectionName: config.Cfg.Qdrant.Collection,
|
||||
Vector: embResp.Data[0].Embedding,
|
||||
Limit: config.Cfg.TopK,
|
||||
WithPayload: &pb.WithPayloadSelector{
|
||||
SelectorOptions: &pb.WithPayloadSelector_Enable{Enable: true},
|
||||
},
|
||||
ScoreThreshold: floatPtr(0.5),
|
||||
})
|
||||
if err != nil {
|
||||
log.Printf("❌ Suche fehlgeschlagen: %v", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
var chunks []KnowledgeChunk
|
||||
seen := make(map[string]bool)
|
||||
for _, hit := range searchResult.Result {
|
||||
text := hit.Payload["text"].GetStringValue()
|
||||
if seen[text] {
|
||||
continue
|
||||
}
|
||||
seen[text] = true
|
||||
chunks = append(chunks, KnowledgeChunk{
|
||||
Text: text,
|
||||
Score: hit.Score,
|
||||
Source: hit.Payload["source"].GetStringValue(),
|
||||
})
|
||||
}
|
||||
return chunks
|
||||
}
|
||||
|
||||
func buildContext(chunks []KnowledgeChunk) string {
|
||||
var b strings.Builder
|
||||
for i, chunk := range chunks {
|
||||
fmt.Fprintf(&b, "--- Information %d (Relevanz: %.1f%%) ---\n", i+1, chunk.Score*100)
|
||||
b.WriteString(chunk.Text)
|
||||
b.WriteString("\n\n")
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// floatPtr returns a pointer to a copy of f; used for optional protobuf
// fields such as SearchPoints.ScoreThreshold.
func floatPtr(f float32) *float32 {
	v := f
	return &v
}
|
||||
Reference in New Issue
Block a user