auto deployment and tests

This commit is contained in:
Christoph K.
2026-03-20 07:07:38 +01:00
parent 0e7aa3e7f2
commit 8163f906cc
12 changed files with 500 additions and 66 deletions

View File

@@ -13,6 +13,7 @@ import (
openai "github.com/sashabaranov/go-openai"
"google.golang.org/grpc/metadata"
"my-brain-importer/internal/agents"
"my-brain-importer/internal/config"
)
@@ -25,7 +26,8 @@ type KnowledgeChunk struct {
// AskQuery sucht relevante Chunks und generiert eine LLM-Antwort.
// Gibt die Antwort als String und die verwendeten Quellen zurück.
func AskQuery(question string) (string, []KnowledgeChunk, error) {
// history enthält vorherige Gesprächsnachrichten (optional, nil für stateless).
func AskQuery(question string, history []agents.HistoryMessage) (string, []KnowledgeChunk, error) {
ctx := context.Background()
ctx = metadata.AppendToOutgoingContext(ctx, "api-key", config.Cfg.Qdrant.APIKey)
@@ -58,17 +60,29 @@ Basierend auf diesen Informationen, beantworte bitte folgende Frage:
slog.Debug("[LLM] AskQuery Prompt",
"model", config.Cfg.Chat.Model,
"history_len", len(history),
"system", systemPrompt,
"user", userPrompt,
)
msgs := []openai.ChatCompletionMessage{
{Role: openai.ChatMessageRoleSystem, Content: systemPrompt},
}
for _, h := range history {
msgs = append(msgs, openai.ChatCompletionMessage{
Role: h.Role,
Content: h.Content,
})
}
msgs = append(msgs, openai.ChatCompletionMessage{
Role: openai.ChatMessageRoleUser,
Content: userPrompt,
})
start := time.Now()
stream, err := chatClient.CreateChatCompletionStream(ctx, openai.ChatCompletionRequest{
Model: config.Cfg.Chat.Model,
Messages: []openai.ChatCompletionMessage{
{Role: openai.ChatMessageRoleSystem, Content: systemPrompt},
{Role: openai.ChatMessageRoleUser, Content: userPrompt},
},
Model: config.Cfg.Chat.Model,
Messages: msgs,
Temperature: 0.7,
MaxTokens: 500,
})
@@ -102,7 +116,7 @@ func Ask(question string) {
fmt.Printf("🤔 Frage: \"%s\"\n\n", question)
fmt.Println("🔍 Durchsuche lokale Wissensdatenbank...")
answer, chunks, err := AskQuery(question)
answer, chunks, err := AskQuery(question, nil)
if err != nil {
log.Fatalf("❌ %v", err)
}