From dfd66e43c66731c2aac6ccf58a505842a61cb03b Mon Sep 17 00:00:00 2001 From: "Christoph K." Date: Sat, 21 Mar 2026 15:03:55 +0100 Subject: [PATCH] Initial commit Co-Authored-By: Claude Sonnet 4.6 --- .claude/agents/coder.md | 119 ++ .claude/agents/software-architect.md | 111 ++ .claude/agents/tester.md | 189 +++ .gitignore | 19 + CLAUDE.md | 93 ++ Makefile | 18 + backend/cmd/server/main.go | 67 + .../migrations/001_create_exercises.down.sql | 1 + .../db/migrations/001_create_exercises.up.sql | 16 + .../002_create_training_sets.down.sql | 2 + .../002_create_training_sets.up.sql | 16 + .../migrations/003_create_sessions.down.sql | 2 + .../db/migrations/003_create_sessions.up.sql | 25 + backend/go.mod | 8 + backend/go.sum | 4 + backend/internal/handler/exercise.go | 86 ++ backend/internal/handler/handler.go | 83 ++ backend/internal/handler/middleware.go | 52 + backend/internal/handler/session.go | 190 +++ backend/internal/handler/stats.go | 47 + backend/internal/handler/training_set.go | 92 ++ backend/internal/migrate/migrate.go | 36 + backend/internal/model/exercise.go | 47 + backend/internal/model/session.go | 19 + backend/internal/model/session_log.go | 76 + backend/internal/model/training_set.go | 52 + backend/internal/model/validation.go | 28 + backend/internal/store/exercise_store.go | 105 ++ backend/internal/store/session_store.go | 269 ++++ backend/internal/store/set_store.go | 200 +++ backend/internal/store/stats_store.go | 85 ++ backend/internal/store/store.go | 37 + .../migrations/001_create_exercises.down.sql | 1 + .../migrations/001_create_exercises.up.sql | 16 + .../002_create_training_sets.down.sql | 2 + .../002_create_training_sets.up.sql | 16 + .../migrations/003_create_sessions.down.sql | 2 + backend/migrations/003_create_sessions.up.sql | 25 + backend/migrations/embed.go | 8 + backend/static/.gitkeep | 0 backend/static/embed.go | 9 + frontend/index.html | 12 + frontend/package.json | 33 + frontend/pnpm-lock.yaml | 1299 +++++++++++++++++ 
frontend/src/App.tsx | 22 + frontend/src/api/client.ts | 178 +++ .../src/components/exercises/ExerciseCard.tsx | 54 + .../src/components/exercises/ExerciseForm.tsx | 116 ++ .../src/components/exercises/ExerciseList.tsx | 66 + .../src/components/history/ExerciseChart.tsx | 113 ++ .../src/components/history/SessionDetail.tsx | 48 + .../src/components/history/SessionList.tsx | 107 ++ frontend/src/components/layout/BottomNav.tsx | 91 ++ .../src/components/layout/ConfirmDialog.tsx | 40 + frontend/src/components/layout/PageShell.tsx | 18 + frontend/src/components/layout/Toast.tsx | 33 + frontend/src/components/sets/SetDetail.tsx | 33 + frontend/src/components/sets/SetForm.tsx | 193 +++ frontend/src/components/sets/SetList.tsx | 81 + .../src/components/training/ActiveSession.tsx | 224 +++ .../src/components/training/LogEntryForm.tsx | 135 ++ .../src/components/training/RestTimer.tsx | 54 + frontend/src/hooks/useConfirm.ts | 43 + frontend/src/hooks/useNavigationGuard.ts | 26 + frontend/src/index.css | 1 + frontend/src/main.tsx | 10 + frontend/src/pages/ExercisesPage.tsx | 87 ++ frontend/src/pages/HistoryPage.tsx | 41 + frontend/src/pages/SetsPage.tsx | 90 ++ frontend/src/pages/TrainingPage.tsx | 107 ++ frontend/src/stores/activeSessionStore.ts | 181 +++ frontend/src/stores/exerciseStore.ts | 85 ++ frontend/src/stores/historyStore.ts | 51 + frontend/src/stores/setStore.ts | 70 + frontend/src/stores/toastStore.ts | 35 + frontend/src/types/index.ts | 136 ++ frontend/tsconfig.json | 21 + frontend/vite.config.ts | 12 + 78 files changed, 6219 insertions(+) create mode 100644 .claude/agents/coder.md create mode 100644 .claude/agents/software-architect.md create mode 100644 .claude/agents/tester.md create mode 100644 .gitignore create mode 100755 CLAUDE.md create mode 100755 Makefile create mode 100755 backend/cmd/server/main.go create mode 100755 backend/db/migrations/001_create_exercises.down.sql create mode 100755 backend/db/migrations/001_create_exercises.up.sql create mode 
100755 backend/db/migrations/002_create_training_sets.down.sql create mode 100755 backend/db/migrations/002_create_training_sets.up.sql create mode 100755 backend/db/migrations/003_create_sessions.down.sql create mode 100755 backend/db/migrations/003_create_sessions.up.sql create mode 100755 backend/go.mod create mode 100755 backend/go.sum create mode 100755 backend/internal/handler/exercise.go create mode 100755 backend/internal/handler/handler.go create mode 100755 backend/internal/handler/middleware.go create mode 100755 backend/internal/handler/session.go create mode 100755 backend/internal/handler/stats.go create mode 100755 backend/internal/handler/training_set.go create mode 100755 backend/internal/migrate/migrate.go create mode 100755 backend/internal/model/exercise.go create mode 100755 backend/internal/model/session.go create mode 100755 backend/internal/model/session_log.go create mode 100755 backend/internal/model/training_set.go create mode 100755 backend/internal/model/validation.go create mode 100755 backend/internal/store/exercise_store.go create mode 100755 backend/internal/store/session_store.go create mode 100755 backend/internal/store/set_store.go create mode 100755 backend/internal/store/stats_store.go create mode 100755 backend/internal/store/store.go create mode 100755 backend/migrations/001_create_exercises.down.sql create mode 100755 backend/migrations/001_create_exercises.up.sql create mode 100755 backend/migrations/002_create_training_sets.down.sql create mode 100755 backend/migrations/002_create_training_sets.up.sql create mode 100755 backend/migrations/003_create_sessions.down.sql create mode 100755 backend/migrations/003_create_sessions.up.sql create mode 100755 backend/migrations/embed.go create mode 100755 backend/static/.gitkeep create mode 100755 backend/static/embed.go create mode 100755 frontend/index.html create mode 100755 frontend/package.json create mode 100755 frontend/pnpm-lock.yaml create mode 100755 frontend/src/App.tsx 
create mode 100755 frontend/src/api/client.ts create mode 100755 frontend/src/components/exercises/ExerciseCard.tsx create mode 100755 frontend/src/components/exercises/ExerciseForm.tsx create mode 100755 frontend/src/components/exercises/ExerciseList.tsx create mode 100755 frontend/src/components/history/ExerciseChart.tsx create mode 100755 frontend/src/components/history/SessionDetail.tsx create mode 100755 frontend/src/components/history/SessionList.tsx create mode 100755 frontend/src/components/layout/BottomNav.tsx create mode 100755 frontend/src/components/layout/ConfirmDialog.tsx create mode 100755 frontend/src/components/layout/PageShell.tsx create mode 100755 frontend/src/components/layout/Toast.tsx create mode 100755 frontend/src/components/sets/SetDetail.tsx create mode 100755 frontend/src/components/sets/SetForm.tsx create mode 100755 frontend/src/components/sets/SetList.tsx create mode 100755 frontend/src/components/training/ActiveSession.tsx create mode 100755 frontend/src/components/training/LogEntryForm.tsx create mode 100755 frontend/src/components/training/RestTimer.tsx create mode 100755 frontend/src/hooks/useConfirm.ts create mode 100755 frontend/src/hooks/useNavigationGuard.ts create mode 100755 frontend/src/index.css create mode 100755 frontend/src/main.tsx create mode 100755 frontend/src/pages/ExercisesPage.tsx create mode 100755 frontend/src/pages/HistoryPage.tsx create mode 100755 frontend/src/pages/SetsPage.tsx create mode 100755 frontend/src/pages/TrainingPage.tsx create mode 100755 frontend/src/stores/activeSessionStore.ts create mode 100755 frontend/src/stores/exerciseStore.ts create mode 100755 frontend/src/stores/historyStore.ts create mode 100755 frontend/src/stores/setStore.ts create mode 100755 frontend/src/stores/toastStore.ts create mode 100755 frontend/src/types/index.ts create mode 100755 frontend/tsconfig.json create mode 100755 frontend/vite.config.ts diff --git a/.claude/agents/coder.md b/.claude/agents/coder.md new file mode 
100644 index 0000000..c7b00fe --- /dev/null +++ b/.claude/agents/coder.md @@ -0,0 +1,119 @@ +--- +name: coder +description: "Use this agent when new Go features need to be implemented or existing Go code needs to be modified in the GoFinance project. This agent writes maintainable, well-documented, idiomatic Go code that adheres to all project requirements. Examples:\n\n\nContext: The user wants a new API endpoint.\nuser: 'Füge einen GET /api/dividends Endpunkt hinzu'\nassistant: 'Ich starte den coder Agenten für die Implementierung.'\n\nNeue Funktionalität in Go → coder Agent.\n\n\n\n\nContext: The user wants to refactor existing code.\nuser: 'Extrahiere die CSV-Parsing-Logik in eine eigene Datei'\nassistant: 'Ich nutze den coder Agenten für das Refactoring.'\n\nCode-Änderung in Go → coder Agent.\n\n\n\n\nContext: A new database migration is needed.\nuser: 'Wir brauchen eine neue Spalte notes in der transactions-Tabelle'\nassistant: 'Der coder Agent wird die Migration und alle betroffenen Stellen implementieren.'\n\nDatenbankänderung mit Go-Code → coder Agent.\n\n" +model: sonnet +color: green +--- + +Du bist ein erfahrener Go-Entwickler für das **GoFinance**-Projekt – ein persönliches Finanzdashboard mit Go-Backend, SQLite-Datenbank und Vanilla-JS-Frontend. + +## Projektarchitektur + +- `main.go`: Einstiegspunkt, HTTP-Server (Port 8080), CSV-Watcher +- `server.go`: REST API Handler + statische Dateien +- `database.go`: DB-Initialisierung, Migrationen, Seed-Funktionen +- `web/index.html`: Frontend (Vanilla HTML/CSS/JS – nur bei explizitem Auftrag anfassen) +- Datenbank: SQLite (`gofinance.db`) + +## Deine Aufgaben + +1. **Anforderungen vollständig lesen**: Lies `CLAUDE.md` bevor du Code schreibst – dort sind alle aktuellen Features, API-Endpunkte, Datenbankstrukturen und Konventionen dokumentiert. +2. **Betroffene Dateien analysieren**: Lies alle relevanten Quellcode-Dateien, bevor du Änderungen vornimmst.
Verstehe den bestehenden Code, bevor du ihn erweiterst. +3. **Code implementieren** nach den Qualitätskriterien unten. +4. **CLAUDE.md aktualisieren**: Nach jeder Implementierung aktualisierst du `CLAUDE.md` so, dass das neue Feature korrekt dokumentiert ist. + +## Qualitätskriterien + +### Wartbarkeit +- Funktionen haben eine einzige klare Verantwortung (Single Responsibility) +- Keine magischen Zahlen oder Strings – benannte Konstanten verwenden +- Fehlerbehandlung explizit und vollständig: jeder `error`-Rückgabewert wird behandelt +- Keine globalen Variablen außer `db *sql.DB` (entsprechend Projektkonvention) + +### Verständlichkeit +- Kommentare bei nicht selbsterklärendem Code (Warum, nicht Was) +- Exportierte Funktionen und Typen haben GoDoc-Kommentare (`// FunctionName ...`) +- Variablen- und Funktionsnamen sind selbsterklärend und konsistent mit dem bestehenden Code +- Komplexe SQL-Queries haben einen einleitenden Kommentar + +### Go-Idiome +- Fehler werden mit `fmt.Errorf("kontext: %w", err)` gewrappt +- HTTP-Handler folgen dem bestehenden Muster in `server.go` +- DB-Migrationen sind idempotent (IF NOT EXISTS, ADD COLUMN IF NOT EXISTS) +- Kein `panic()` in Produktionscode außer bei Programmierfehlern (z.B. ungültige Regex) + +### Sicherheit +- SQL: ausschließlich Prepared Statements / Parameterized Queries – kein String-Formatting in SQL +- HTTP: Input-Validierung vor DB-Zugriff +- Keine sensitiven Daten in Logs + +## Workflow + +1. `CLAUDE.md` lesen – Anforderungen und Konventionen verstehen +2. Betroffene Quelldateien lesen (`server.go`, `database.go`, `main.go`) +3. Implementierungsplan skizzieren (intern, nicht ausgeben) +4. Code schreiben und in die richtigen Dateien einfügen +5. Prüfen: Kompiliert der Code? (`go build ./...` gedanklich durchlaufen) +6. `CLAUDE.md` aktualisieren: neuen Endpunkt, neue Tabellenspalte, neue Logik eintragen +7. 
Kurze Zusammenfassung: Was wurde implementiert, welche Dateien wurden geändert + +## Projektspezifische Konventionen + +### HTTP-Handler +```go +// handleXxx handles GET/PATCH /api/xxx. +// Kurze Beschreibung was der Handler macht. +func (s *Server) handleXxx(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + http.Error(w, "method not allowed", http.StatusMethodNotAllowed) + return + } + // ... Logik ... + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(result) +} +``` + +### Datenbank-Migrationen +```go +// migrateXxx fügt [Beschreibung] hinzu. +// Die Migration ist idempotent und kann mehrfach ausgeführt werden. +func migrateXxx(db *sql.DB) error { + _, err := db.Exec(`ALTER TABLE foo ADD COLUMN bar TEXT`) + if err != nil && !strings.Contains(err.Error(), "duplicate column name") { + return fmt.Errorf("migrateXxx: %w", err) + } + return nil +} +``` + +### Fehlerbehandlung in Handlers +```go +rows, err := s.db.Query(`SELECT ...`) +if err != nil { + http.Error(w, "internal server error", http.StatusInternalServerError) + log.Printf("handleXxx: query failed: %v", err) + return +} +defer rows.Close() +``` + +## Constraints + +- Keine externen Go-Abhängigkeiten hinzufügen – nur stdlib und bereits verwendete Packages (`github.com/mattn/go-sqlite3`) +- Kein CSS-Framework, kein JS-Framework im Frontend +- Keine Änderungen an `web/index.html` ohne expliziten Auftrag +- Tests werden vom `tester`-Agenten geschrieben – du fokussierst dich auf Produktionscode +- Nach jeder Implementierung muss `CLAUDE.md` aktuell sein + +# Persistent Agent Memory + +You have a persistent, file-based memory system found at: `/home/jacek/projekte/gofinance/.claude/agent-memory/coder/` + +Speichere Erinnerungen über: +- Architekturentscheidungen, die nicht offensichtlich aus dem Code hervorgehen +- Wiederkehrende Muster oder Anti-Patterns, die im Projekt vermieden werden sollen +- Bekannte Fallstricke (z.B.
NULL-Handling bei bestimmten DB-Spalten) +- Vom Nutzer gegebenes Feedback zur Code-Qualität + +Nutze dasselbe Memory-Format wie andere Agenten im Projekt (Frontmatter mit name/description/type + MEMORY.md Index). diff --git a/.claude/agents/software-architect.md b/.claude/agents/software-architect.md new file mode 100644 index 0000000..7c40fea --- /dev/null +++ b/.claude/agents/software-architect.md @@ -0,0 +1,111 @@ +--- +name: software-architect +description: "Use this agent when you need to verify or enforce the software architecture of GoFinance, review structural decisions, or ensure that new code fits the existing architecture. Invoke after larger changes, when adding new files/packages, or when the user asks for an architecture review. Examples:\n\n\nContext: A new feature was implemented and the user wants to verify it fits the architecture.\nuser: 'Prüf ob der neue Code zur Architektur passt'\nassistant: 'Ich starte den software-architect Agenten für eine Architekturprüfung.'\n\nArchitekturprüfung → software-architect Agent.\n\n\n\n\nContext: The user plans a larger refactoring.\nuser: 'Ich will die CSV-Logik in eine eigene Datei auslagern'\nassistant: 'Lass mich den software-architect Agenten fragen, ob das zur Architektur passt.'\n\nStrukturelle Entscheidung → software-architect Agent.\n\n" +model: sonnet +color: blue +--- + +Du bist der **Softwarearchitekt** des GoFinance-Projekts. Deine Aufgabe ist es, die Softwarestruktur zu überwachen, Architekturentscheidungen zu treffen und sicherzustellen, dass der Code konsistent, wartbar und erweiterbar bleibt.
+ +## Projektarchitektur (Soll-Zustand) + +``` +gofinance/ +├── main.go – Einstiegspunkt: Server starten, Migrationen, CSV-Watcher +├── server.go – HTTP-Handler, Routing, JSON-Responses +├── database.go – DB-Schema, Migrationen, Seed-Funktionen +├── web/ +│ └── index.html – Frontend (alles in einer Datei: HTML + CSS + JS) +├── importcsv/ – CSV-Eingangsordner (wird gescannt) +└── processedcsv/ – verarbeitete CSV-Dateien +``` + +### Schichtenmodell +``` +HTTP-Request + │ + ▼ +server.go (Handler) ← keine Business-Logik, nur Request/Response + │ + ▼ +database.go (DB-Zugriff) ← SQL, Migrationen, Datentransformation + │ + ▼ +SQLite (gofinance.db) +``` + +### Verantwortlichkeiten je Datei + +| Datei | Gehört rein | Gehört NICHT rein | +|-------|-------------|-------------------| +| `main.go` | Server-Start, Watcher-Start, Migrationen aufrufen | Business-Logik, SQL | +| `server.go` | HTTP-Handler, Routing, JSON encode/decode, Input-Validierung | SQL-Queries, Datei-I/O | +| `database.go` | SQL-Queries, Migrationen, Schema-Definition | HTTP-Logik, CSV-Parsing | +| `main.go` (CSV-Teil) | CSV-Parsing, Datei-Watcher, Import-Logik | HTTP-Handler | + +## Deine Aufgaben + +### 1. Architekturprüfung +Wenn du nach einer Prüfung gefragt wirst: +1. Lies alle Go-Quelldateien +2. Prüfe ob Verantwortlichkeiten korrekt verteilt sind +3. Prüfe ob neue Dateien oder Packages eingeführt wurden und ob das sinnvoll ist +4. Prüfe ob `CLAUDE.md` den aktuellen Stand korrekt widerspiegelt +5. Erstelle einen klaren Befund: Was ist gut, was verletzt die Architektur, was sollte refactored werden + +### 2. Strukturentscheidungen +Wenn neue Features geplant werden: +1. Bewerte wo neuer Code hingehört (welche Datei, welche Funktion) +2. Prüfe ob eine neue Datei gerechtfertigt ist (Faustregel: erst ab ~300 Zeilen oder klar abgegrenzter Domäne) +3. Gib konkrete Empfehlungen mit Begründung + +### 3. 
CLAUDE.md pflegen +Nach Architekturänderungen aktualisierst du `CLAUDE.md`: +- Architektur-Abschnitt muss den Ist-Zustand widerspiegeln +- Neue Dateien/Module dokumentieren +- Veraltete Abschnitte entfernen + +## Architekturprinzipien für dieses Projekt + +1. **Einfachheit vor Abstraktion**: Keine Interfaces, kein Dependency Injection, keine Layer-Patterns – direkter Code ist hier besser als Cleverness +2. **Eine Datei pro Domäne**: Nicht für jede Funktion eine neue Datei. Erst aufteilen wenn eine Datei unübersichtlich wird (>400 Zeilen) +3. **Kein Framework-Creep**: Keine neuen Abhängigkeiten ohne guten Grund. stdlib reicht für dieses Projekt +4. **Frontend bleibt eine Datei**: `web/index.html` enthält HTML, CSS und JS – kein Build-Step, kein Framework +5. **Migrationen sind idempotent**: Jede DB-Migration muss mehrfach ausführbar sein ohne Fehler + +## Befund-Format + +Wenn du eine Architekturprüfung durchführst, strukturiere dein Ergebnis so: + +``` +## Architektur-Befund + +### ✓ Konform +- [Was gut ist] + +### ⚠ Verletzungen +- [Was die Architektur verletzt, mit konkreter Stelle und Begründung] + +### Empfehlungen +- [Konkrete Maßnahmen, priorisiert] + +### CLAUDE.md Status +- [Ist die Dokumentation aktuell? Was fehlt?] 
+``` + +## Constraints + +- Du gibst Empfehlungen und Befunde – du schreibst keinen Produktionscode (das macht der `coder` Agent) +- Du änderst nur `CLAUDE.md`, keine Quelldateien +- Deine Empfehlungen müssen die Projektprinzipien respektieren (Einfachheit, keine neuen Dependencies) + +# Persistent Agent Memory + +You have a persistent, file-based memory system found at: `/home/jacek/projekte/gofinance/.claude/agent-memory/software-architect/` + +Speichere Erinnerungen über: +- Architekturentscheidungen die bewusst getroffen wurden (und warum) +- Bereiche des Codes die strukturelle Schulden haben +- Refactoring-Vorhaben die besprochen aber noch nicht umgesetzt wurden + +Nutze dasselbe Memory-Format wie andere Agenten im Projekt (Frontmatter mit name/description/type + MEMORY.md Index). diff --git a/.claude/agents/tester.md b/.claude/agents/tester.md new file mode 100644 index 0000000..4085b5a --- /dev/null +++ b/.claude/agents/tester.md @@ -0,0 +1,189 @@ +--- +name: tester +description: "Use this agent when new Go code has been written or modified in the GoFinance project and needs unit tests, or when existing tests need review and improvement. Examples:\n\n\nContext: The user has just written a new function in database.go to extract WKN from transaction descriptions.\nuser: 'I just added a new ExtractWKN function to database.go'\nassistant: 'Great!
Let me use the tester agent to write unit tests for the new function.'\n\nSince new Go code was written, use the Agent tool to launch the tester agent to create appropriate unit tests.\n\n\n\n\nContext: A new API endpoint was added to server.go.\nuser: 'I added the PATCH /api/annual-balance/{year} endpoint'\nassistant: 'I will now use the tester agent to write unit tests covering this new endpoint.'\n\nA new API endpoint was introduced, so the tester agent should be used proactively to ensure test coverage.\n\n\n\n\nContext: The user asks for a quality check on the CSV import logic.\nuser: 'Can you check the quality of my CSV parsing code?'\nassistant: 'I will launch the tester agent to review the CSV parsing code and add or improve tests for it.'\n\nUser is requesting quality assurance, which maps directly to this agent's purpose.\n\n" +model: sonnet +color: red +memory: project +--- + +You are an expert Go software engineer specializing in writing high-quality unit tests for Go applications. You have deep knowledge of Go's standard `testing` package, table-driven test patterns, mocking strategies, and best practices for testing HTTP handlers, database logic, and CSV parsing. + +You are working on **GoFinance**, a personal finance dashboard built with Go, SQLite, and Vanilla JS. 
The project structure is: +- `main.go`: Entry point, HTTP server (port 8080), CSV watcher +- `server.go`: REST API handlers + static file serving +- `database.go`: DB initialization, migrations, seed functions, WKN extraction +- `web/index.html`: Frontend (not your concern for testing) + +Key domain knowledge: +- SQLite database with tables: `transactions`, `portfolio`, `category_classifications`, `annual_balance` +- WKN extraction regex: `(?i)WKN:\s*([A-Z0-9]{6})` +- CSV import from `importcsv/` folder, processed files moved to `processedcsv/` +- API endpoints follow REST conventions (GET/PATCH) +- Portfolio API returns only latest entry per security via `MAX(id) GROUP BY security` + +## Your Responsibilities + +1. **Analyze the target code**: Understand what the function/method/handler does before writing tests. +2. **Write comprehensive tests** using Go's standard `testing` package: + - Use table-driven tests (`[]struct{ name, input, expected }`) wherever multiple cases apply + - Cover happy paths, edge cases, and error conditions + - Test boundary values (empty strings, nil, zero values, large inputs) +3. **Test HTTP handlers** using `net/http/httptest` (no external dependencies) +4. **Test database functions** using an in-memory SQLite database (`:memory:`) to keep tests isolated and fast +5. **Test CSV parsing** with inline test data (no file I/O dependencies) +6. **Ensure test quality**: + - Tests must be deterministic and not rely on external state + - Each test must be independently runnable + - Use `t.Helper()` in helper functions + - Use `t.Cleanup()` for resource teardown + - Avoid `time.Sleep` – use channels or synchronization primitives if needed +7. **Follow Go conventions**: + - Test files named `*_test.go` + - Test functions named `TestXxx` + - Benchmark functions named `BenchmarkXxx` when performance matters + - Use `t.Errorf` for non-fatal failures, `t.Fatalf` for fatal ones + - No external testing frameworks (no testify, gomock, etc.) 
– use only stdlib + +## Workflow + +1. Read the code to be tested carefully +2. Identify all testable units (functions, methods, handlers) +3. List test cases covering: success path, error path, edge cases +4. Write the test file with clear, self-documenting test names +5. Verify that the tests compile correctly by checking imports and types +6. Self-review: ensure no test is trivially always-passing (e.g., `assert(1 == 1)`) +7. Report: summarize what was tested and what coverage gaps remain + +## Output Format + +Provide: +1. The complete test file content (ready to save as `*_test.go`) +2. A brief summary of what each test group covers +3. Any noted gaps in testability (e.g., functions that need refactoring for testability) with concrete suggestions + +## Constraints +- No external dependencies – only Go stdlib +- No CSS/JS/HTML testing – that is out of scope +- Keep tests fast: prefer in-memory SQLite over file-based DB in tests +- Tests must pass with `go test ./...` without any special setup + +**Update your agent memory** as you discover patterns, common issues, and architectural decisions in the GoFinance codebase that affect how tests should be written. Record: +- Which functions are already tested and which lack coverage +- Patterns used for DB setup in tests (e.g., helper functions for schema creation) +- Known edge cases in WKN extraction, CSV parsing, or API handlers +- Any refactoring done to make code more testable + +# Persistent Agent Memory + +You have a persistent, file-based memory system found at: `/home/jacek/projekte/gofinance/.claude/agent-memory/tester/` + +You should build up this memory system over time so that future conversations can have a complete picture of who the user is, how they'd like to collaborate with you, what behaviors to avoid or repeat, and the context behind the work the user gives you. + +If the user explicitly asks you to remember something, save it immediately as whichever type fits best. 
If they ask you to forget something, find and remove the relevant entry. + +## Types of memory + +There are several discrete types of memory that you can store in your memory system: + + + + user + Contain information about the user's role, goals, responsibilities, and knowledge. Great user memories help you tailor your future behavior to the user's preferences and perspective. Your goal in reading and writing these memories is to build up an understanding of who the user is and how you can be most helpful to them specifically. For example, you should collaborate with a senior software engineer differently than a student who is coding for the very first time. Keep in mind, that the aim here is to be helpful to the user. Avoid writing memories about the user that could be viewed as a negative judgement or that are not relevant to the work you're trying to accomplish together. + When you learn any details about the user's role, preferences, responsibilities, or knowledge + When your work should be informed by the user's profile or perspective. For example, if the user is asking you to explain a part of the code, you should answer that question in a way that is tailored to the specific details that they will find most valuable or that helps them build their mental model in relation to domain knowledge they already have. + + user: I'm a data scientist investigating what logging we have in place + assistant: [saves user memory: user is a data scientist, currently focused on observability/logging] + + user: I've been writing Go for ten years but this is my first time touching the React side of this repo + assistant: [saves user memory: deep Go expertise, new to React and this project's frontend — frame frontend explanations in terms of backend analogues] + + + + feedback + Guidance or correction the user has given you. 
These are a very important type of memory to read and write as they allow you to remain coherent and responsive to the way you should approach work in the project. Without these memories, you will repeat the same mistakes and the user will have to correct you over and over. + Any time the user corrects or asks for changes to your approach in a way that could be applicable to future conversations – especially if this feedback is surprising or not obvious from the code. These often take the form of "no not that, instead do...", "lets not...", "don't...". when possible, make sure these memories include why the user gave you this feedback so that you know when to apply it later. + Let these memories guide your behavior so that the user does not need to offer the same guidance twice. + + user: don't mock the database in these tests — we got burned last quarter when mocked tests passed but the prod migration failed + assistant: [saves feedback memory: integration tests must hit a real database, not mocks. Reason: prior incident where mock/prod divergence masked a broken migration] + + user: stop summarizing what you just did at the end of every response, I can read the diff + assistant: [saves feedback memory: this user wants terse responses with no trailing summaries] + + + + project + Information that you learn about ongoing work, goals, initiatives, bugs, or incidents within the project that is not otherwise derivable from the code or git history. Project memories help you understand the broader context and motivation behind the work the user is doing within this working directory. + When you learn who is doing what, why, or by when. These states change relatively quickly so try to keep your understanding of this up to date. Always convert relative dates in user messages to absolute dates when saving (e.g., "Thursday" → "2026-03-05"), so the memory remains interpretable after time passes. 
+ Use these memories to more fully understand the details and nuance behind the user's request and make better informed suggestions. + + user: we're freezing all non-critical merges after Thursday — mobile team is cutting a release branch + assistant: [saves project memory: merge freeze begins 2026-03-05 for mobile release cut. Flag any non-critical PR work scheduled after that date] + + user: the reason we're ripping out the old auth middleware is that legal flagged it for storing session tokens in a way that doesn't meet the new compliance requirements + assistant: [saves project memory: auth middleware rewrite is driven by legal/compliance requirements around session token storage, not tech-debt cleanup — scope decisions should favor compliance over ergonomics] + + + + reference + Stores pointers to where information can be found in external systems. These memories allow you to remember where to look to find up-to-date information outside of the project directory. + When you learn about resources in external systems and their purpose. For example, that bugs are tracked in a specific project in Linear or that feedback can be found in a specific Slack channel. + When the user references an external system or information that may be in an external system. + + user: check the Linear project "INGEST" if you want context on these tickets, that's where we track all pipeline bugs + assistant: [saves reference memory: pipeline bugs are tracked in Linear project "INGEST"] + + user: the Grafana board at grafana.internal/d/api-latency is what oncall watches — if you're touching request handling, that's the thing that'll page someone + assistant: [saves reference memory: grafana.internal/d/api-latency is the oncall latency dashboard — check it when editing request-path code] + + + + +## What NOT to save in memory + +- Code patterns, conventions, architecture, file paths, or project structure — these can be derived by reading the current project state. 
+- Git history, recent changes, or who-changed-what — `git log` / `git blame` are authoritative. +- Debugging solutions or fix recipes — the fix is in the code; the commit message has the context. +- Anything already documented in CLAUDE.md files. +- Ephemeral task details: in-progress work, temporary state, current conversation context. + +## How to save memories + +Saving a memory is a two-step process: + +**Step 1** — write the memory to its own file (e.g., `user_role.md`, `feedback_testing.md`) using this frontmatter format: + +```markdown +--- +name: {{memory name}} +description: {{one-line description — used to decide relevance in future conversations, so be specific}} +type: {{user, feedback, project, reference}} +--- + +{{memory content}} +``` + +**Step 2** — add a pointer to that file in `MEMORY.md`. `MEMORY.md` is an index, not a memory — it should contain only links to memory files with brief descriptions. It has no frontmatter. Never write memory content directly into `MEMORY.md`. + +- `MEMORY.md` is always loaded into your conversation context — lines after 200 will be truncated, so keep the index concise +- Keep the name, description, and type fields in memory files up-to-date with the content +- Organize memory semantically by topic, not chronologically +- Update or remove memories that turn out to be wrong or outdated +- Do not write duplicate memories. First check if there is an existing memory you can update before writing a new one. + +## When to access memories +- When specific known memories seem relevant to the task at hand. +- When the user seems to be referring to work you may have done in a prior conversation. +- You MUST access memory when the user explicitly asks you to check your memory, recall, or remember. + +## Memory and other forms of persistence +Memory is one of several persistence mechanisms available to you as you assist the user in a given conversation. 
The distinction is often that memory can be recalled in future conversations and should not be used for persisting information that is only useful within the scope of the current conversation. +- When to use or update a plan instead of memory: If you are about to start a non-trivial implementation task and would like to reach alignment with the user on your approach you should use a Plan rather than saving this information to memory. Similarly, if you already have a plan within the conversation and you have changed your approach persist that change by updating the plan rather than saving a memory. +- When to use or update tasks instead of memory: When you need to break your work in current conversation into discrete steps or keep track of your progress use tasks instead of saving to memory. Tasks are great for persisting information about the work that needs to be done in the current conversation, but memory should be reserved for information that will be useful in future conversations. + +- Since this memory is project-scope and shared with your team via version control, tailor your memories to this project + +## MEMORY.md + +Your MEMORY.md is currently empty. When you save new memories, they will appear here. diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1214997 --- /dev/null +++ b/.gitignore @@ -0,0 +1,19 @@ +# Binary +krafttrainer + +# Database +*.db +*.db-shm +*.db-wal + +# Frontend build +frontend/dist/ +frontend/node_modules/ +backend/static/* +!backend/static/.gitkeep + +# Go +backend/vendor/ + +# OS +.DS_Store diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100755 index 0000000..6b69998 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,93 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +# Krafttrainer + +Einzelnutzer-Webapplikation zur Verwaltung und Protokollierung von Kraftübungen. 
+ +## Tech Stack + +- **Backend:** Go 1.22+ mit `net/http` stdlib Router, SQLite (`go-sqlite3`), `golang-migrate` +- **Frontend:** React 19, Vite 8, TypeScript (strict), Tailwind CSS 4 (`@tailwindcss/vite`), Zustand, Recharts +- **Paketmanager:** pnpm (Version in `packageManager` field fixiert) +- **Produktion:** Single Binary via `embed` (Frontend in `backend/static/` eingebettet) + +## Build & Run + +```bash +make dev-backend # Go-Server auf :8090 +make dev-frontend # Vite Dev-Server auf :5173 (Proxy → :8090) +make build # Single Binary ./krafttrainer (Frontend-Build + Go-Build mit CGO_ENABLED=1) +make clean # Binary + Dist-Ordner entfernen +``` + +Frontend TypeScript-Typecheck (ohne Build): +```bash +cd frontend && pnpm exec tsc --noEmit +``` + +Es gibt keine automatisierten Tests (weder Go-Tests noch Frontend-Tests). + +## Deployment + +- Läuft auf `192.168.1.118:8090` als systemd-Service (`krafttrainer.service`) +- User: `christoph`, Binary: `/home/christoph/krafttrainer/krafttrainer` +- DB: `/home/christoph/krafttrainer/krafttrainer.db` +- Deploy: `scp krafttrainer christoph@192.168.1.118:~/krafttrainer/`, dann `sudo systemctl restart krafttrainer` + +## Architektur + +### Backend-Flow + +`Handler → Store → DB`. Jeder Handler folgt exakt diesem Muster: +1. `decodeJSON()` mit `DisallowUnknownFields()` +2. `model.Validate()` aufrufen +3. Store-Methode aufrufen +4. Fehler differenzieren und `writeJSON()` / `writeError()` aufrufen + +Store-Methoden geben nach Mutationen immer **frisch aus der DB gelesene Objekte** zurück (kein Rekonstruieren aus Input). + +### Fehlerbehandlung (Backend) + +- Store-Fehler → 500 (generisch, kein DB-Leak) +- Validierungsfehler → 400 +- Nicht gefunden (`sql.ErrNoRows`) → 404 +- UNIQUE-Verletzung → 409 + +Sentinel-Strings im Error-Message für Handler-Differenzierung: `"UNIQUE_VIOLATION:"`, `"SESSION_CLOSED"`. Diese werden mit `strings.Contains()` geprüft — kein custom error type. 
+ +### Routing + +Go 1.22+ Pattern-Matching im stdlib ServeMux mit `{id}`-Platzhaltern. Middleware-Chain: `Recoverer → RequestLogger → CORS`. SPA-Fallback in `main.go`: prüft zuerst ob statische Datei existiert, serviert sonst `index.html`. + +### Frontend-Stores (Zustand) + +Stores sind **flach und unabhängig** — keine direkte Store-zu-Store-Kommunikation. Feedback läuft ausschließlich über `useToastStore.getState().addToast()`. `activeSessionStore` verwaltet einen Timer-Interval manuell (kein React-Effect-Cleanup — beim `stopTimer()` explizit clearen). + +Alle HTTP-Aufrufe gehen über `src/api/client.ts`. `ApiError` (extends Error) hat `status`-Property für HTTP-Statuscodes. + +### Datenbank + +- SQLite mit WAL-Mode und Foreign Keys (via Connection-String-Parameter in `store.go`) +- Migrations auto-run beim Start via embedded FS (`backend/migrations/embed.go`) +- `exercise_name` in `session_logs` **denormalisiert** gespeichert (damit gelöschte Übungen historische Daten nicht verwaisen lassen) +- UNIQUE-Constraint auf `(session_id, exercise_id, set_number)` +- Soft-Delete bei Übungen via `deleted_at` Timestamp + +## Konventionen + +- **API Prefix:** Alle Endpoints unter `/api/v1`. Fehler als `{ "error": "..." }`. +- **Gewichte:** Immer in kg. Feldnamen mit `_kg` Suffix (`weight_kg`, `weight_step_kg`). +- **UI-Sprache:** Deutsch. +- **Dark Mode:** Default. `bg-gray-950` Body, `bg-gray-900` Cards, `blue-500` Primary. +- **Touch-Targets:** min 44×44px. 
+ +## Vollständige Spezifikation + +Siehe `PRD.md` im übergeordneten Verzeichnis (`03_Projekte/fitness-pad/PRD.md`) für: +- Vollständiges SQLite-Schema (Abschnitt 4.1) +- Alle Go-Structs und TypeScript-Typen (Abschnitt 4.2–4.3) +- Komplette API-Spezifikation mit Request/Response-Beispielen (Abschnitt 5) +- Validierungsregeln (Abschnitt 6) +- Frontend-Spezifikation und Komponentenverhalten (Abschnitt 7) diff --git a/Makefile b/Makefile new file mode 100755 index 0000000..073cb8f --- /dev/null +++ b/Makefile @@ -0,0 +1,18 @@ +.PHONY: dev-backend dev-frontend build clean + +dev-backend: + cd backend && go run ./cmd/server + +dev-frontend: + cd frontend && pnpm dev + +build: + cd frontend && pnpm install && pnpm build + rm -rf backend/static/* + cp -r frontend/dist/* backend/static/ + cd backend && CGO_ENABLED=1 go build -o ../krafttrainer ./cmd/server + +clean: + rm -f krafttrainer + rm -rf frontend/dist + find backend/static -not -name '.gitkeep' -not -path backend/static -delete 2>/dev/null || true diff --git a/backend/cmd/server/main.go b/backend/cmd/server/main.go new file mode 100755 index 0000000..2e815dc --- /dev/null +++ b/backend/cmd/server/main.go @@ -0,0 +1,67 @@ +package main + +import ( + "io/fs" + "log" + "net/http" + + "krafttrainer/internal/handler" + mig "krafttrainer/internal/migrate" + "krafttrainer/internal/store" + "krafttrainer/migrations" + "krafttrainer/static" +) + +func main() { + // Datenbank initialisieren + s, err := store.New("krafttrainer.db") + if err != nil { + log.Fatalf("Datenbank: %v", err) + } + defer s.Close() + + // Migrationen ausführen + if err := mig.Run(s.DB(), migrations.FS); err != nil { + log.Fatalf("Migrationen: %v", err) + } + log.Println("Migrationen erfolgreich") + + // HTTP-Routen + mux := http.NewServeMux() + h := handler.New(s) + h.RegisterRoutes(mux) + + // SPA-Fallback: statische Dateien aus embed.FS servieren + mux.Handle("/", spaHandler(static.FS)) + + // Middleware-Chain und Server starten + srv := 
handler.Chain(mux, handler.Recoverer, handler.RequestLogger, handler.CORS) + + log.Println("Server startet auf :8090") + if err := http.ListenAndServe(":8090", srv); err != nil { + log.Fatalf("Server: %v", err) + } +} + +// spaHandler serviert statische Dateien und fällt auf index.html zurück (SPA-Routing). +func spaHandler(embeddedFS fs.FS) http.Handler { + fileServer := http.FileServer(http.FS(embeddedFS)) + + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + path := r.URL.Path + if path == "/" { + path = "index.html" + } else if len(path) > 0 && path[0] == '/' { + path = path[1:] + } + + f, err := embeddedFS.Open(path) + if err != nil { + // SPA-Fallback: index.html für unbekannte Pfade + r.URL.Path = "/" + } else { + f.Close() + } + fileServer.ServeHTTP(w, r) + }) +} diff --git a/backend/db/migrations/001_create_exercises.down.sql b/backend/db/migrations/001_create_exercises.down.sql new file mode 100755 index 0000000..2eff629 --- /dev/null +++ b/backend/db/migrations/001_create_exercises.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS exercises; diff --git a/backend/db/migrations/001_create_exercises.up.sql b/backend/db/migrations/001_create_exercises.up.sql new file mode 100755 index 0000000..4a814f1 --- /dev/null +++ b/backend/db/migrations/001_create_exercises.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE exercises ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL CHECK(length(name) >= 1 AND length(name) <= 100), + description TEXT DEFAULT '', + muscle_group TEXT NOT NULL CHECK(muscle_group IN ( + 'brust', 'ruecken', 'schultern', 'bizeps', 'trizeps', + 'beine', 'bauch', 'ganzkoerper', 'sonstiges' + )), + weight_step_kg REAL NOT NULL DEFAULT 2.5 CHECK(weight_step_kg > 0), + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at DATETIME +); + +CREATE INDEX idx_exercises_muscle_group ON exercises(muscle_group) WHERE deleted_at IS NULL; +CREATE INDEX 
idx_exercises_deleted_at ON exercises(deleted_at); diff --git a/backend/db/migrations/002_create_training_sets.down.sql b/backend/db/migrations/002_create_training_sets.down.sql new file mode 100755 index 0000000..7079e06 --- /dev/null +++ b/backend/db/migrations/002_create_training_sets.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS set_exercises; +DROP TABLE IF EXISTS training_sets; diff --git a/backend/db/migrations/002_create_training_sets.up.sql b/backend/db/migrations/002_create_training_sets.up.sql new file mode 100755 index 0000000..17155d7 --- /dev/null +++ b/backend/db/migrations/002_create_training_sets.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE training_sets ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL CHECK(length(name) >= 1 AND length(name) <= 100), + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at DATETIME +); + +CREATE TABLE set_exercises ( + set_id INTEGER NOT NULL REFERENCES training_sets(id) ON DELETE CASCADE, + exercise_id INTEGER NOT NULL REFERENCES exercises(id), + position INTEGER NOT NULL CHECK(position >= 0), + PRIMARY KEY (set_id, exercise_id) +); + +CREATE INDEX idx_set_exercises_set_id ON set_exercises(set_id); diff --git a/backend/db/migrations/003_create_sessions.down.sql b/backend/db/migrations/003_create_sessions.down.sql new file mode 100755 index 0000000..297f1b5 --- /dev/null +++ b/backend/db/migrations/003_create_sessions.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS session_logs; +DROP TABLE IF EXISTS sessions; diff --git a/backend/db/migrations/003_create_sessions.up.sql b/backend/db/migrations/003_create_sessions.up.sql new file mode 100755 index 0000000..0270522 --- /dev/null +++ b/backend/db/migrations/003_create_sessions.up.sql @@ -0,0 +1,25 @@ +CREATE TABLE sessions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + set_id INTEGER NOT NULL REFERENCES training_sets(id), + started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + ended_at 
DATETIME, + note TEXT DEFAULT '' +); + +CREATE TABLE session_logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + session_id INTEGER NOT NULL REFERENCES sessions(id) ON DELETE CASCADE, + exercise_id INTEGER NOT NULL REFERENCES exercises(id), + exercise_name TEXT NOT NULL, + set_number INTEGER NOT NULL CHECK(set_number >= 1), + weight_kg REAL NOT NULL CHECK(weight_kg >= 0 AND weight_kg <= 999), + reps INTEGER NOT NULL CHECK(reps >= 0 AND reps <= 999), + note TEXT DEFAULT '', + logged_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + UNIQUE(session_id, exercise_id, set_number) +); + +CREATE INDEX idx_sessions_set_id ON sessions(set_id); +CREATE INDEX idx_sessions_started_at ON sessions(started_at); +CREATE INDEX idx_session_logs_session_id ON session_logs(session_id); +CREATE INDEX idx_session_logs_exercise_id ON session_logs(exercise_id); diff --git a/backend/go.mod b/backend/go.mod new file mode 100755 index 0000000..78fee1f --- /dev/null +++ b/backend/go.mod @@ -0,0 +1,8 @@ +module krafttrainer + +go 1.26.1 + +require ( + github.com/golang-migrate/migrate/v4 v4.19.1 // indirect + github.com/mattn/go-sqlite3 v1.14.37 // indirect +) diff --git a/backend/go.sum b/backend/go.sum new file mode 100755 index 0000000..274e921 --- /dev/null +++ b/backend/go.sum @@ -0,0 +1,4 @@ +github.com/golang-migrate/migrate/v4 v4.19.1 h1:OCyb44lFuQfYXYLx1SCxPZQGU7mcaZ7gH9yH4jSFbBA= +github.com/golang-migrate/migrate/v4 v4.19.1/go.mod h1:CTcgfjxhaUtsLipnLoQRWCrjYXycRz/g5+RWDuYgPrE= +github.com/mattn/go-sqlite3 v1.14.37 h1:3DOZp4cXis1cUIpCfXLtmlGolNLp2VEqhiB/PARNBIg= +github.com/mattn/go-sqlite3 v1.14.37/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= diff --git a/backend/internal/handler/exercise.go b/backend/internal/handler/exercise.go new file mode 100755 index 0000000..ad45263 --- /dev/null +++ b/backend/internal/handler/exercise.go @@ -0,0 +1,86 @@ +package handler + +import ( + "database/sql" + "krafttrainer/internal/model" + "net/http" +) + +func (h *Handler) 
handleListExercises(w http.ResponseWriter, r *http.Request) { + muscleGroup := r.URL.Query().Get("muscle_group") + query := r.URL.Query().Get("q") + + exercises, err := h.store.ListExercises(muscleGroup, query) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Laden der Übungen") + return + } + writeJSON(w, http.StatusOK, exercises) +} + +func (h *Handler) handleCreateExercise(w http.ResponseWriter, r *http.Request) { + var req model.CreateExerciseRequest + if err := decodeJSON(r, &req); err != nil { + writeError(w, http.StatusBadRequest, "Ungültiger Request-Body") + return + } + if err := req.Validate(); err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + exercise, err := h.store.CreateExercise(&req) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Erstellen der Übung") + return + } + writeJSON(w, http.StatusCreated, exercise) +} + +func (h *Handler) handleUpdateExercise(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + + var req model.CreateExerciseRequest + if err := decodeJSON(r, &req); err != nil { + writeError(w, http.StatusBadRequest, "Ungültiger Request-Body") + return + } + if err := req.Validate(); err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + exercise, err := h.store.UpdateExercise(id, &req) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Aktualisieren der Übung") + return + } + if exercise == nil { + writeError(w, http.StatusNotFound, "Übung nicht gefunden") + return + } + writeJSON(w, http.StatusOK, exercise) +} + +func (h *Handler) handleDeleteExercise(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + + err = h.store.SoftDeleteExercise(id) + if err == sql.ErrNoRows { + writeError(w, 
http.StatusNotFound, "Übung nicht gefunden") + return + } + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Löschen der Übung") + return + } + w.WriteHeader(http.StatusNoContent) +} diff --git a/backend/internal/handler/handler.go b/backend/internal/handler/handler.go new file mode 100755 index 0000000..8724325 --- /dev/null +++ b/backend/internal/handler/handler.go @@ -0,0 +1,83 @@ +package handler + +import ( + "encoding/json" + "krafttrainer/internal/store" + "net/http" + "strconv" +) + +// Handler bündelt alle HTTP-Handler und hält eine Referenz auf den Store. +type Handler struct { + store *store.Store +} + +// New erstellt einen neuen Handler. +func New(store *store.Store) *Handler { + return &Handler{store: store} +} + +// RegisterRoutes registriert alle API-Routen am ServeMux. +func (h *Handler) RegisterRoutes(mux *http.ServeMux) { + // Exercises + mux.HandleFunc("GET /api/v1/exercises", h.handleListExercises) + mux.HandleFunc("POST /api/v1/exercises", h.handleCreateExercise) + mux.HandleFunc("PUT /api/v1/exercises/{id}", h.handleUpdateExercise) + mux.HandleFunc("DELETE /api/v1/exercises/{id}", h.handleDeleteExercise) + + // Training Sets + mux.HandleFunc("GET /api/v1/sets", h.handleListSets) + mux.HandleFunc("POST /api/v1/sets", h.handleCreateSet) + mux.HandleFunc("PUT /api/v1/sets/{id}", h.handleUpdateSet) + mux.HandleFunc("DELETE /api/v1/sets/{id}", h.handleDeleteSet) + + // Sessions + mux.HandleFunc("POST /api/v1/sessions", h.handleCreateSession) + mux.HandleFunc("GET /api/v1/sessions", h.handleListSessions) + mux.HandleFunc("GET /api/v1/sessions/{id}", h.handleGetSession) + mux.HandleFunc("PUT /api/v1/sessions/{id}/end", h.handleEndSession) + + // Session Logs + mux.HandleFunc("POST /api/v1/sessions/{id}/logs", h.handleCreateLog) + mux.HandleFunc("PUT /api/v1/sessions/{id}/logs/{logId}", h.handleUpdateLog) + mux.HandleFunc("DELETE /api/v1/sessions/{id}/logs/{logId}", h.handleDeleteLog) + + // Stats + mux.HandleFunc("GET 
/api/v1/exercises/{id}/last-log", h.handleGetLastLog) + mux.HandleFunc("GET /api/v1/exercises/{id}/history", h.handleGetExerciseHistory) + mux.HandleFunc("GET /api/v1/stats/overview", h.handleGetStatsOverview) +} + +// --- Hilfsfunktionen --- + +func writeJSON(w http.ResponseWriter, status int, data any) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + json.NewEncoder(w).Encode(data) +} + +func writeError(w http.ResponseWriter, status int, message string) { + writeJSON(w, status, map[string]string{"error": message}) +} + +func decodeJSON(r *http.Request, dst any) error { + dec := json.NewDecoder(r.Body) + dec.DisallowUnknownFields() + return dec.Decode(dst) +} + +func pathID(r *http.Request, name string) (int64, error) { + return strconv.ParseInt(r.PathValue(name), 10, 64) +} + +func queryInt(r *http.Request, name string, defaultVal int) int { + v := r.URL.Query().Get(name) + if v == "" { + return defaultVal + } + n, err := strconv.Atoi(v) + if err != nil { + return defaultVal + } + return n +} diff --git a/backend/internal/handler/middleware.go b/backend/internal/handler/middleware.go new file mode 100755 index 0000000..f95722c --- /dev/null +++ b/backend/internal/handler/middleware.go @@ -0,0 +1,52 @@ +package handler + +import ( + "log" + "net/http" + "time" +) + +// Chain wendet Middlewares in der angegebenen Reihenfolge an. +func Chain(h http.Handler, middlewares ...func(http.Handler) http.Handler) http.Handler { + for i := len(middlewares) - 1; i >= 0; i-- { + h = middlewares[i](h) + } + return h +} + +// CORS erlaubt Cross-Origin-Requests vom Vite Dev-Server. 
+func CORS(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Access-Control-Allow-Origin", "http://*") + w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS") + w.Header().Set("Access-Control-Allow-Headers", "Content-Type") + + if r.Method == http.MethodOptions { + w.WriteHeader(http.StatusNoContent) + return + } + next.ServeHTTP(w, r) + }) +} + +// RequestLogger loggt eingehende Requests mit Dauer. +func RequestLogger(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + start := time.Now() + next.ServeHTTP(w, r) + log.Printf("%s %s %s", r.Method, r.URL.Path, time.Since(start)) + }) +} + +// Recoverer fängt Panics in Handlern ab und gibt 500 zurück. +func Recoverer(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + defer func() { + if err := recover(); err != nil { + log.Printf("PANIC: %v", err) + writeError(w, http.StatusInternalServerError, "Interner Serverfehler") + } + }() + next.ServeHTTP(w, r) + }) +} diff --git a/backend/internal/handler/session.go b/backend/internal/handler/session.go new file mode 100755 index 0000000..4b83cd2 --- /dev/null +++ b/backend/internal/handler/session.go @@ -0,0 +1,190 @@ +package handler + +import ( + "database/sql" + "krafttrainer/internal/model" + "net/http" + "strings" +) + +func (h *Handler) handleCreateSession(w http.ResponseWriter, r *http.Request) { + var req model.CreateSessionRequest + if err := decodeJSON(r, &req); err != nil { + writeError(w, http.StatusBadRequest, "Ungültiger Request-Body") + return + } + if req.SetID == 0 { + writeError(w, http.StatusBadRequest, "set_id ist erforderlich") + return + } + + session, err := h.store.CreateSession(req.SetID) + if err != nil { + if strings.Contains(err.Error(), "existiert nicht") { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + 
writeError(w, http.StatusInternalServerError, "Fehler beim Starten der Session") + return + } + writeJSON(w, http.StatusCreated, session) +} + +func (h *Handler) handleListSessions(w http.ResponseWriter, r *http.Request) { + limit := queryInt(r, "limit", 20) + offset := queryInt(r, "offset", 0) + + sessions, err := h.store.ListSessions(limit, offset) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Laden der Sessions") + return + } + writeJSON(w, http.StatusOK, sessions) +} + +func (h *Handler) handleGetSession(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + + session, err := h.store.GetSession(id) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Laden der Session") + return + } + if session == nil { + writeError(w, http.StatusNotFound, "Session nicht gefunden") + return + } + writeJSON(w, http.StatusOK, session) +} + +func (h *Handler) handleEndSession(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + + var body struct { + Note string `json:"note"` + } + // Body ist optional + decodeJSON(r, &body) + + session, err := h.store.EndSession(id, body.Note) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Beenden der Session") + return + } + if session == nil { + writeError(w, http.StatusNotFound, "Session nicht gefunden oder bereits beendet") + return + } + writeJSON(w, http.StatusOK, session) +} + +func (h *Handler) handleCreateLog(w http.ResponseWriter, r *http.Request) { + sessionID, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige Session-ID") + return + } + + var req model.CreateLogRequest + if err := decodeJSON(r, &req); err != nil { + writeError(w, http.StatusBadRequest, "Ungültiger Request-Body") + return + } + if 
err := req.Validate(); err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + log, err := h.store.CreateLog(sessionID, &req) + if err != nil { + if strings.Contains(err.Error(), "SESSION_CLOSED") { + writeError(w, http.StatusBadRequest, "Session ist bereits beendet") + return + } + if strings.Contains(err.Error(), "UNIQUE_VIOLATION") { + writeError(w, http.StatusConflict, err.Error()) + return + } + if strings.Contains(err.Error(), "existiert nicht") { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + writeError(w, http.StatusInternalServerError, "Fehler beim Loggen des Satzes") + return + } + writeJSON(w, http.StatusCreated, log) +} + +func (h *Handler) handleUpdateLog(w http.ResponseWriter, r *http.Request) { + sessionID, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige Session-ID") + return + } + logID, err := pathID(r, "logId") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige Log-ID") + return + } + + var req model.UpdateLogRequest + if err := decodeJSON(r, &req); err != nil { + writeError(w, http.StatusBadRequest, "Ungültiger Request-Body") + return + } + if err := req.Validate(); err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + log, err := h.store.UpdateLog(sessionID, logID, &req) + if err != nil { + if strings.Contains(err.Error(), "SESSION_CLOSED") { + writeError(w, http.StatusBadRequest, "Session ist bereits beendet") + return + } + writeError(w, http.StatusInternalServerError, "Fehler beim Aktualisieren des Satzes") + return + } + if log == nil { + writeError(w, http.StatusNotFound, "Log nicht gefunden") + return + } + writeJSON(w, http.StatusOK, log) +} + +func (h *Handler) handleDeleteLog(w http.ResponseWriter, r *http.Request) { + sessionID, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige Session-ID") + return + } + logID, err := pathID(r, "logId") + if err != nil { + 
writeError(w, http.StatusBadRequest, "Ungültige Log-ID") + return + } + + err = h.store.DeleteLog(sessionID, logID) + if err != nil { + if strings.Contains(err.Error(), "SESSION_CLOSED") { + writeError(w, http.StatusBadRequest, "Session ist bereits beendet") + return + } + if err == sql.ErrNoRows { + writeError(w, http.StatusNotFound, "Log nicht gefunden") + return + } + writeError(w, http.StatusInternalServerError, "Fehler beim Löschen des Satzes") + return + } + w.WriteHeader(http.StatusNoContent) +} diff --git a/backend/internal/handler/stats.go b/backend/internal/handler/stats.go new file mode 100755 index 0000000..e391b4b --- /dev/null +++ b/backend/internal/handler/stats.go @@ -0,0 +1,47 @@ +package handler + +import "net/http" + +func (h *Handler) handleGetLastLog(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + + lastLog, err := h.store.GetLastLog(id) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Laden des letzten Logs") + return + } + if lastLog == nil { + writeError(w, http.StatusNotFound, "Noch kein Log für diese Übung") + return + } + writeJSON(w, http.StatusOK, lastLog) +} + +func (h *Handler) handleGetExerciseHistory(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + limit := queryInt(r, "limit", 30) + + logs, err := h.store.GetExerciseHistory(id, limit) + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Laden der Übungshistorie") + return + } + writeJSON(w, http.StatusOK, logs) +} + +func (h *Handler) handleGetStatsOverview(w http.ResponseWriter, r *http.Request) { + overview, err := h.store.GetStatsOverview() + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Laden der Statistiken") + return + } + writeJSON(w, http.StatusOK, overview) +} diff --git 
a/backend/internal/handler/training_set.go b/backend/internal/handler/training_set.go new file mode 100755 index 0000000..822dc6d --- /dev/null +++ b/backend/internal/handler/training_set.go @@ -0,0 +1,92 @@ +package handler + +import ( + "database/sql" + "krafttrainer/internal/model" + "net/http" + "strings" +) + +func (h *Handler) handleListSets(w http.ResponseWriter, r *http.Request) { + sets, err := h.store.ListSets() + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Laden der Sets") + return + } + writeJSON(w, http.StatusOK, sets) +} + +func (h *Handler) handleCreateSet(w http.ResponseWriter, r *http.Request) { + var req model.CreateSetRequest + if err := decodeJSON(r, &req); err != nil { + writeError(w, http.StatusBadRequest, "Ungültiger Request-Body") + return + } + if err := req.Validate(); err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + set, err := h.store.CreateSet(&req) + if err != nil { + if strings.Contains(err.Error(), "existiert nicht") { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + writeError(w, http.StatusInternalServerError, "Fehler beim Erstellen des Sets") + return + } + writeJSON(w, http.StatusCreated, set) +} + +func (h *Handler) handleUpdateSet(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + + var req model.UpdateSetRequest + if err := decodeJSON(r, &req); err != nil { + writeError(w, http.StatusBadRequest, "Ungültiger Request-Body") + return + } + if err := req.Validate(); err != nil { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + + set, err := h.store.UpdateSet(id, &req) + if err != nil { + if strings.Contains(err.Error(), "existiert nicht") { + writeError(w, http.StatusBadRequest, err.Error()) + return + } + writeError(w, http.StatusInternalServerError, "Fehler beim Aktualisieren des Sets") + return + } + if set == nil { + 
writeError(w, http.StatusNotFound, "Set nicht gefunden") + return + } + writeJSON(w, http.StatusOK, set) +} + +func (h *Handler) handleDeleteSet(w http.ResponseWriter, r *http.Request) { + id, err := pathID(r, "id") + if err != nil { + writeError(w, http.StatusBadRequest, "Ungültige ID") + return + } + + err = h.store.SoftDeleteSet(id) + if err == sql.ErrNoRows { + writeError(w, http.StatusNotFound, "Set nicht gefunden") + return + } + if err != nil { + writeError(w, http.StatusInternalServerError, "Fehler beim Löschen des Sets") + return + } + w.WriteHeader(http.StatusNoContent) +} diff --git a/backend/internal/migrate/migrate.go b/backend/internal/migrate/migrate.go new file mode 100755 index 0000000..b09134b --- /dev/null +++ b/backend/internal/migrate/migrate.go @@ -0,0 +1,36 @@ +package migrate + +import ( + "database/sql" + "errors" + "fmt" + "io/fs" + + "github.com/golang-migrate/migrate/v4" + "github.com/golang-migrate/migrate/v4/database/sqlite3" + "github.com/golang-migrate/migrate/v4/source/iofs" +) + +// Run führt alle ausstehenden Migrationen aus. +// migrationsFS muss die Migrations-Dateien enthalten. 
+func Run(db *sql.DB, migrationsFS fs.FS) error { + driver, err := sqlite3.WithInstance(db, &sqlite3.Config{}) + if err != nil { + return fmt.Errorf("Migrations-Treiber erstellen: %w", err) + } + + source, err := iofs.New(migrationsFS, ".") + if err != nil { + return fmt.Errorf("Migrations-Quelle erstellen: %w", err) + } + + m, err := migrate.NewWithInstance("iofs", source, "sqlite3", driver) + if err != nil { + return fmt.Errorf("Migrator erstellen: %w", err) + } + + if err := m.Up(); err != nil && !errors.Is(err, migrate.ErrNoChange) { + return fmt.Errorf("Migrationen ausführen: %w", err) + } + return nil +} diff --git a/backend/internal/model/exercise.go b/backend/internal/model/exercise.go new file mode 100755 index 0000000..5c685c8 --- /dev/null +++ b/backend/internal/model/exercise.go @@ -0,0 +1,47 @@ +package model + +import ( + "errors" + "strings" + "time" +) + +// Exercise repräsentiert eine Kraftübung. +type Exercise struct { + ID int64 `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + MuscleGroup string `json:"muscle_group"` + WeightStepKg float64 `json:"weight_step_kg"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + DeletedAt *time.Time `json:"deleted_at,omitempty"` +} + +// CreateExerciseRequest enthält die Felder zum Anlegen einer Übung. +type CreateExerciseRequest struct { + Name string `json:"name"` + Description string `json:"description"` + MuscleGroup string `json:"muscle_group"` + WeightStepKg *float64 `json:"weight_step_kg"` +} + +// Validate prüft und normalisiert den Request. Setzt Default für WeightStepKg. 
+func (r *CreateExerciseRequest) Validate() error { + r.Name = strings.TrimSpace(r.Name) + if len(r.Name) == 0 || len(r.Name) > 100 { + return errors.New("Name muss 1–100 Zeichen lang sein") + } + if !ValidMuscleGroup(r.MuscleGroup) { + return errors.New("Ungültige Muskelgruppe") + } + if r.WeightStepKg != nil { + if *r.WeightStepKg <= 0 { + return errors.New("Gewichtsschritt muss > 0 sein") + } + } else { + def := 2.5 + r.WeightStepKg = &def + } + return nil +} diff --git a/backend/internal/model/session.go b/backend/internal/model/session.go new file mode 100755 index 0000000..43d7224 --- /dev/null +++ b/backend/internal/model/session.go @@ -0,0 +1,19 @@ +package model + +import "time" + +// Session repräsentiert eine Trainingseinheit. +type Session struct { + ID int64 `json:"id"` + SetID int64 `json:"set_id"` + SetName string `json:"set_name"` + StartedAt time.Time `json:"started_at"` + EndedAt *time.Time `json:"ended_at,omitempty"` + Note string `json:"note"` + Logs []SessionLog `json:"logs,omitempty"` +} + +// CreateSessionRequest enthält die Felder zum Starten einer Session. +type CreateSessionRequest struct { + SetID int64 `json:"set_id"` +} diff --git a/backend/internal/model/session_log.go b/backend/internal/model/session_log.go new file mode 100755 index 0000000..20150ba --- /dev/null +++ b/backend/internal/model/session_log.go @@ -0,0 +1,76 @@ +package model + +import ( + "errors" + "time" +) + +// SessionLog repräsentiert einen einzelnen Satz innerhalb einer Session. +type SessionLog struct { + ID int64 `json:"id"` + SessionID int64 `json:"session_id"` + ExerciseID int64 `json:"exercise_id"` + ExerciseName string `json:"exercise_name"` + SetNumber int `json:"set_number"` + WeightKg float64 `json:"weight_kg"` + Reps int `json:"reps"` + Note string `json:"note"` + LoggedAt time.Time `json:"logged_at"` +} + +// CreateLogRequest enthält die Felder zum Loggen eines Satzes. 
type CreateLogRequest struct {
	ExerciseID int64   `json:"exercise_id"`
	SetNumber  int     `json:"set_number"`
	WeightKg   float64 `json:"weight_kg"`
	Reps       int     `json:"reps"`
	Note       string  `json:"note"`
}

// Validate checks the request against the same bounds the database CHECK
// constraints enforce (set_number >= 1, 0..999 for weight and reps).
func (r *CreateLogRequest) Validate() error {
	switch {
	case r.SetNumber < 1:
		return errors.New("Satznummer muss ≥ 1 sein")
	case r.WeightKg < 0 || r.WeightKg > 999:
		return errors.New("Gewicht muss zwischen 0 und 999 kg liegen")
	case r.Reps < 0 || r.Reps > 999:
		return errors.New("Wiederholungen müssen zwischen 0 und 999 liegen")
	default:
		return nil
	}
}

// UpdateLogRequest carries a partial correction of a logged set.
// nil fields are left unchanged.
type UpdateLogRequest struct {
	WeightKg *float64 `json:"weight_kg"`
	Reps     *int     `json:"reps"`
	Note     *string  `json:"note"`
}

// Validate checks only the fields that are present.
func (r *UpdateLogRequest) Validate() error {
	if w := r.WeightKg; w != nil && (*w < 0 || *w > 999) {
		return errors.New("Gewicht muss zwischen 0 und 999 kg liegen")
	}
	if reps := r.Reps; reps != nil && (*reps < 0 || *reps > 999) {
		return errors.New("Wiederholungen müssen zwischen 0 und 999 liegen")
	}
	return nil
}

// LastLogResponse contains the most recently logged values of an exercise.
type LastLogResponse struct {
	WeightKg float64 `json:"weight_kg"`
	Reps     int     `json:"reps"`
}

// ExerciseStats holds aggregated statistics for one exercise.
+type ExerciseStats struct { + ExerciseID int64 `json:"exercise_id"` + ExerciseName string `json:"exercise_name"` + MaxWeightKg float64 `json:"max_weight_kg"` + TotalVolumeKg float64 `json:"total_volume_kg"` + TotalSets int `json:"total_sets"` + LastTrained string `json:"last_trained"` +} diff --git a/backend/internal/model/training_set.go b/backend/internal/model/training_set.go new file mode 100755 index 0000000..f0dd5fc --- /dev/null +++ b/backend/internal/model/training_set.go @@ -0,0 +1,52 @@ +package model + +import ( + "errors" + "strings" + "time" +) + +// TrainingSet ist eine benannte Zusammenstellung von Übungen. +type TrainingSet struct { + ID int64 `json:"id"` + Name string `json:"name"` + Exercises []Exercise `json:"exercises"` + CreatedAt time.Time `json:"created_at"` + DeletedAt *time.Time `json:"deleted_at,omitempty"` +} + +// CreateSetRequest enthält die Felder zum Anlegen eines Sets. +type CreateSetRequest struct { + Name string `json:"name"` + ExerciseIDs []int64 `json:"exercise_ids"` +} + +// Validate prüft und normalisiert den Request. +func (r *CreateSetRequest) Validate() error { + r.Name = strings.TrimSpace(r.Name) + if len(r.Name) == 0 || len(r.Name) > 100 { + return errors.New("Name muss 1–100 Zeichen lang sein") + } + if len(r.ExerciseIDs) == 0 { + return errors.New("Mindestens eine Übung erforderlich") + } + return nil +} + +// UpdateSetRequest enthält die Felder zum Aktualisieren eines Sets. +type UpdateSetRequest struct { + Name string `json:"name"` + ExerciseIDs []int64 `json:"exercise_ids"` +} + +// Validate prüft und normalisiert den Request. 
+func (r *UpdateSetRequest) Validate() error { + r.Name = strings.TrimSpace(r.Name) + if len(r.Name) == 0 || len(r.Name) > 100 { + return errors.New("Name muss 1–100 Zeichen lang sein") + } + if len(r.ExerciseIDs) == 0 { + return errors.New("Mindestens eine Übung erforderlich") + } + return nil +} diff --git a/backend/internal/model/validation.go b/backend/internal/model/validation.go new file mode 100755 index 0000000..e658641 --- /dev/null +++ b/backend/internal/model/validation.go @@ -0,0 +1,28 @@ +package model + +// Gültige Muskelgruppen für Übungen. +var muscleGroups = map[string]bool{ + "brust": true, + "ruecken": true, + "schultern": true, + "bizeps": true, + "trizeps": true, + "beine": true, + "bauch": true, + "ganzkoerper": true, + "sonstiges": true, +} + +// ValidMuscleGroup prüft ob die übergebene Muskelgruppe gültig ist. +func ValidMuscleGroup(mg string) bool { + return muscleGroups[mg] +} + +// MuscleGroups gibt alle gültigen Muskelgruppen zurück. +func MuscleGroups() []string { + groups := make([]string, 0, len(muscleGroups)) + for g := range muscleGroups { + groups = append(groups, g) + } + return groups +} diff --git a/backend/internal/store/exercise_store.go b/backend/internal/store/exercise_store.go new file mode 100755 index 0000000..d259dfc --- /dev/null +++ b/backend/internal/store/exercise_store.go @@ -0,0 +1,105 @@ +package store + +import ( + "database/sql" + "fmt" + "krafttrainer/internal/model" +) + +// ListExercises gibt alle nicht-gelöschten Übungen zurück, optional gefiltert. +func (s *Store) ListExercises(muscleGroup, query string) ([]model.Exercise, error) { + rows, err := s.db.Query(` + SELECT id, name, description, muscle_group, weight_step_kg, created_at, updated_at + FROM exercises + WHERE deleted_at IS NULL + AND (muscle_group = ? OR ? = '') + AND (name LIKE '%' || ? || '%' OR ? 
= '') + ORDER BY name`, + muscleGroup, muscleGroup, query, query, + ) + if err != nil { + return nil, fmt.Errorf("Übungen abfragen: %w", err) + } + defer rows.Close() + + var exercises []model.Exercise + for rows.Next() { + var e model.Exercise + if err := rows.Scan(&e.ID, &e.Name, &e.Description, &e.MuscleGroup, &e.WeightStepKg, &e.CreatedAt, &e.UpdatedAt); err != nil { + return nil, fmt.Errorf("Übung scannen: %w", err) + } + exercises = append(exercises, e) + } + if exercises == nil { + exercises = []model.Exercise{} + } + return exercises, rows.Err() +} + +// GetExercise gibt eine einzelne Übung zurück. +func (s *Store) GetExercise(id int64) (*model.Exercise, error) { + var e model.Exercise + err := s.db.QueryRow(` + SELECT id, name, description, muscle_group, weight_step_kg, created_at, updated_at, deleted_at + FROM exercises WHERE id = ?`, id, + ).Scan(&e.ID, &e.Name, &e.Description, &e.MuscleGroup, &e.WeightStepKg, &e.CreatedAt, &e.UpdatedAt, &e.DeletedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("Übung abfragen: %w", err) + } + return &e, nil +} + +// CreateExercise legt eine neue Übung an und gibt sie zurück. +func (s *Store) CreateExercise(req *model.CreateExerciseRequest) (*model.Exercise, error) { + result, err := s.db.Exec(` + INSERT INTO exercises (name, description, muscle_group, weight_step_kg) + VALUES (?, ?, ?, ?)`, + req.Name, req.Description, req.MuscleGroup, *req.WeightStepKg, + ) + if err != nil { + return nil, fmt.Errorf("Übung erstellen: %w", err) + } + + id, _ := result.LastInsertId() + return s.GetExercise(id) +} + +// UpdateExercise aktualisiert eine Übung und gibt sie zurück. +func (s *Store) UpdateExercise(id int64, req *model.CreateExerciseRequest) (*model.Exercise, error) { + result, err := s.db.Exec(` + UPDATE exercises + SET name = ?, description = ?, muscle_group = ?, weight_step_kg = ?, + updated_at = CURRENT_TIMESTAMP + WHERE id = ? 
AND deleted_at IS NULL`, + req.Name, req.Description, req.MuscleGroup, *req.WeightStepKg, id, + ) + if err != nil { + return nil, fmt.Errorf("Übung aktualisieren: %w", err) + } + + rows, _ := result.RowsAffected() + if rows == 0 { + return nil, nil + } + return s.GetExercise(id) +} + +// SoftDeleteExercise markiert eine Übung als gelöscht. +func (s *Store) SoftDeleteExercise(id int64) error { + result, err := s.db.Exec(` + UPDATE exercises SET deleted_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP + WHERE id = ? AND deleted_at IS NULL`, id, + ) + if err != nil { + return fmt.Errorf("Übung löschen: %w", err) + } + + rows, _ := result.RowsAffected() + if rows == 0 { + return sql.ErrNoRows + } + return nil +} diff --git a/backend/internal/store/session_store.go b/backend/internal/store/session_store.go new file mode 100755 index 0000000..8b9f665 --- /dev/null +++ b/backend/internal/store/session_store.go @@ -0,0 +1,269 @@ +package store + +import ( + "database/sql" + "fmt" + "krafttrainer/internal/model" + "strings" +) + +// CreateSession startet eine neue Trainingseinheit. +func (s *Store) CreateSession(setID int64) (*model.Session, error) { + // Set prüfen + var setName string + err := s.db.QueryRow(`SELECT name FROM training_sets WHERE id = ? AND deleted_at IS NULL`, setID).Scan(&setName) + if err == sql.ErrNoRows { + return nil, fmt.Errorf("Set %d existiert nicht", setID) + } + if err != nil { + return nil, fmt.Errorf("Set prüfen: %w", err) + } + + result, err := s.db.Exec(`INSERT INTO sessions (set_id) VALUES (?)`, setID) + if err != nil { + return nil, fmt.Errorf("Session erstellen: %w", err) + } + + id, _ := result.LastInsertId() + return s.GetSession(id) +} + +// GetSession gibt eine Session mit allen Logs zurück. 
+func (s *Store) GetSession(id int64) (*model.Session, error) { + var sess model.Session + err := s.db.QueryRow(` + SELECT s.id, s.set_id, ts.name, s.started_at, s.ended_at, s.note + FROM sessions s + JOIN training_sets ts ON ts.id = s.set_id + WHERE s.id = ?`, id, + ).Scan(&sess.ID, &sess.SetID, &sess.SetName, &sess.StartedAt, &sess.EndedAt, &sess.Note) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("Session abfragen: %w", err) + } + + logs, err := s.getSessionLogs(id) + if err != nil { + return nil, err + } + sess.Logs = logs + return &sess, nil +} + +// EndSession beendet eine Session. +func (s *Store) EndSession(id int64, note string) (*model.Session, error) { + result, err := s.db.Exec(` + UPDATE sessions SET ended_at = CURRENT_TIMESTAMP, note = ? + WHERE id = ? AND ended_at IS NULL`, note, id, + ) + if err != nil { + return nil, fmt.Errorf("Session beenden: %w", err) + } + rows, _ := result.RowsAffected() + if rows == 0 { + return nil, nil + } + return s.GetSession(id) +} + +// ListSessions gibt paginierte Sessions zurück (neueste zuerst). +func (s *Store) ListSessions(limit, offset int) ([]model.Session, error) { + rows, err := s.db.Query(` + SELECT s.id, s.set_id, ts.name, s.started_at, s.ended_at, s.note + FROM sessions s + JOIN training_sets ts ON ts.id = s.set_id + ORDER BY s.started_at DESC + LIMIT ? OFFSET ?`, limit, offset, + ) + if err != nil { + return nil, fmt.Errorf("Sessions abfragen: %w", err) + } + defer rows.Close() + + var sessions []model.Session + for rows.Next() { + var sess model.Session + if err := rows.Scan(&sess.ID, &sess.SetID, &sess.SetName, &sess.StartedAt, &sess.EndedAt, &sess.Note); err != nil { + return nil, fmt.Errorf("Session scannen: %w", err) + } + sessions = append(sessions, sess) + } + if sessions == nil { + sessions = []model.Session{} + } + return sessions, rows.Err() +} + +// CreateLog fügt einen Satz zu einer offenen Session hinzu. 
+func (s *Store) CreateLog(sessionID int64, req *model.CreateLogRequest) (*model.SessionLog, error) { + // Session offen? + if err := s.checkSessionOpen(sessionID); err != nil { + return nil, err + } + + // Übungsname denormalisiert speichern + var exerciseName string + err := s.db.QueryRow(`SELECT name FROM exercises WHERE id = ? AND deleted_at IS NULL`, req.ExerciseID).Scan(&exerciseName) + if err == sql.ErrNoRows { + return nil, fmt.Errorf("Übung %d existiert nicht", req.ExerciseID) + } + if err != nil { + return nil, fmt.Errorf("Übung abfragen: %w", err) + } + + result, err := s.db.Exec(` + INSERT INTO session_logs (session_id, exercise_id, exercise_name, set_number, weight_kg, reps, note) + VALUES (?, ?, ?, ?, ?, ?, ?)`, + sessionID, req.ExerciseID, exerciseName, req.SetNumber, req.WeightKg, req.Reps, req.Note, + ) + if err != nil { + if strings.Contains(err.Error(), "UNIQUE constraint") { + return nil, fmt.Errorf("UNIQUE_VIOLATION: Satz %d für diese Übung existiert bereits", req.SetNumber) + } + return nil, fmt.Errorf("Log erstellen: %w", err) + } + + id, _ := result.LastInsertId() + return s.getLog(id) +} + +// UpdateLog korrigiert einen Satz in einer offenen Session. +func (s *Store) UpdateLog(sessionID, logID int64, req *model.UpdateLogRequest) (*model.SessionLog, error) { + if err := s.checkSessionOpen(sessionID); err != nil { + return nil, err + } + + // Log gehört zur Session? + var exists bool + err := s.db.QueryRow(`SELECT EXISTS(SELECT 1 FROM session_logs WHERE id = ? 
AND session_id = ?)`, logID, sessionID).Scan(&exists) + if err != nil { + return nil, fmt.Errorf("Log prüfen: %w", err) + } + if !exists { + return nil, nil + } + + // Partielle Updates + updates := []string{} + args := []any{} + if req.WeightKg != nil { + updates = append(updates, "weight_kg = ?") + args = append(args, *req.WeightKg) + } + if req.Reps != nil { + updates = append(updates, "reps = ?") + args = append(args, *req.Reps) + } + if req.Note != nil { + updates = append(updates, "note = ?") + args = append(args, *req.Note) + } + if len(updates) == 0 { + return s.getLog(logID) + } + + args = append(args, logID) + _, err = s.db.Exec( + fmt.Sprintf("UPDATE session_logs SET %s WHERE id = ?", strings.Join(updates, ", ")), + args..., + ) + if err != nil { + return nil, fmt.Errorf("Log aktualisieren: %w", err) + } + return s.getLog(logID) +} + +// DeleteLog löscht einen Satz aus einer offenen Session. +func (s *Store) DeleteLog(sessionID, logID int64) error { + if err := s.checkSessionOpen(sessionID); err != nil { + return err + } + + result, err := s.db.Exec(`DELETE FROM session_logs WHERE id = ? AND session_id = ?`, logID, sessionID) + if err != nil { + return fmt.Errorf("Log löschen: %w", err) + } + rows, _ := result.RowsAffected() + if rows == 0 { + return sql.ErrNoRows + } + return nil +} + +// GetLastLog gibt die letzten Werte einer Übung zurück. +func (s *Store) GetLastLog(exerciseID int64) (*model.LastLogResponse, error) { + var resp model.LastLogResponse + err := s.db.QueryRow(` + SELECT weight_kg, reps FROM session_logs + WHERE exercise_id = ? + ORDER BY logged_at DESC LIMIT 1`, exerciseID, + ).Scan(&resp.WeightKg, &resp.Reps) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("Letzten Log abfragen: %w", err) + } + return &resp, nil +} + +// checkSessionOpen prüft ob eine Session offen ist. 
+func (s *Store) checkSessionOpen(sessionID int64) error { + var endedAt *string + err := s.db.QueryRow(`SELECT ended_at FROM sessions WHERE id = ?`, sessionID).Scan(&endedAt) + if err == sql.ErrNoRows { + return fmt.Errorf("Session %d existiert nicht", sessionID) + } + if err != nil { + return fmt.Errorf("Session prüfen: %w", err) + } + if endedAt != nil { + return fmt.Errorf("SESSION_CLOSED: Session ist bereits beendet") + } + return nil +} + +// getLog gibt einen einzelnen Log-Eintrag zurück. +func (s *Store) getLog(id int64) (*model.SessionLog, error) { + var log model.SessionLog + err := s.db.QueryRow(` + SELECT id, session_id, exercise_id, exercise_name, set_number, weight_kg, reps, note, logged_at + FROM session_logs WHERE id = ?`, id, + ).Scan(&log.ID, &log.SessionID, &log.ExerciseID, &log.ExerciseName, &log.SetNumber, &log.WeightKg, &log.Reps, &log.Note, &log.LoggedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("Log abfragen: %w", err) + } + return &log, nil +} + +// getSessionLogs gibt alle Logs einer Session zurück. +func (s *Store) getSessionLogs(sessionID int64) ([]model.SessionLog, error) { + rows, err := s.db.Query(` + SELECT id, session_id, exercise_id, exercise_name, set_number, weight_kg, reps, note, logged_at + FROM session_logs + WHERE session_id = ? 
+ ORDER BY exercise_id, set_number`, sessionID, + ) + if err != nil { + return nil, fmt.Errorf("Logs abfragen: %w", err) + } + defer rows.Close() + + var logs []model.SessionLog + for rows.Next() { + var log model.SessionLog + if err := rows.Scan(&log.ID, &log.SessionID, &log.ExerciseID, &log.ExerciseName, &log.SetNumber, &log.WeightKg, &log.Reps, &log.Note, &log.LoggedAt); err != nil { + return nil, fmt.Errorf("Log scannen: %w", err) + } + logs = append(logs, log) + } + if logs == nil { + logs = []model.SessionLog{} + } + return logs, rows.Err() +} diff --git a/backend/internal/store/set_store.go b/backend/internal/store/set_store.go new file mode 100755 index 0000000..ffe2ecf --- /dev/null +++ b/backend/internal/store/set_store.go @@ -0,0 +1,200 @@ +package store + +import ( + "database/sql" + "fmt" + "krafttrainer/internal/model" +) + +// ListSets gibt alle nicht-gelöschten Sets mit ihren Übungen zurück. +func (s *Store) ListSets() ([]model.TrainingSet, error) { + rows, err := s.db.Query(` + SELECT id, name, created_at FROM training_sets + WHERE deleted_at IS NULL ORDER BY name`) + if err != nil { + return nil, fmt.Errorf("Sets abfragen: %w", err) + } + defer rows.Close() + + var sets []model.TrainingSet + for rows.Next() { + var ts model.TrainingSet + if err := rows.Scan(&ts.ID, &ts.Name, &ts.CreatedAt); err != nil { + return nil, fmt.Errorf("Set scannen: %w", err) + } + sets = append(sets, ts) + } + if err := rows.Err(); err != nil { + return nil, err + } + if sets == nil { + sets = []model.TrainingSet{} + } + + for i := range sets { + exercises, err := s.getSetExercises(sets[i].ID) + if err != nil { + return nil, err + } + sets[i].Exercises = exercises + } + return sets, nil +} + +// GetSet gibt ein einzelnes Set mit Übungen zurück. 
+func (s *Store) GetSet(id int64) (*model.TrainingSet, error) { + var ts model.TrainingSet + err := s.db.QueryRow(` + SELECT id, name, created_at, deleted_at FROM training_sets WHERE id = ?`, id, + ).Scan(&ts.ID, &ts.Name, &ts.CreatedAt, &ts.DeletedAt) + if err == sql.ErrNoRows { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("Set abfragen: %w", err) + } + + exercises, err := s.getSetExercises(id) + if err != nil { + return nil, err + } + ts.Exercises = exercises + return &ts, nil +} + +// CreateSet legt ein neues Set an (in einer Transaktion). +func (s *Store) CreateSet(req *model.CreateSetRequest) (*model.TrainingSet, error) { + tx, err := s.db.Begin() + if err != nil { + return nil, fmt.Errorf("Transaktion starten: %w", err) + } + defer tx.Rollback() + + // Prüfen ob alle Übungen existieren + for _, eid := range req.ExerciseIDs { + var exists bool + err := tx.QueryRow(`SELECT EXISTS(SELECT 1 FROM exercises WHERE id = ? AND deleted_at IS NULL)`, eid).Scan(&exists) + if err != nil { + return nil, fmt.Errorf("Übung prüfen: %w", err) + } + if !exists { + return nil, fmt.Errorf("Übung %d existiert nicht", eid) + } + } + + result, err := tx.Exec(`INSERT INTO training_sets (name) VALUES (?)`, req.Name) + if err != nil { + return nil, fmt.Errorf("Set erstellen: %w", err) + } + + id, _ := result.LastInsertId() + + for pos, eid := range req.ExerciseIDs { + _, err := tx.Exec(`INSERT INTO set_exercises (set_id, exercise_id, position) VALUES (?, ?, ?)`, id, eid, pos) + if err != nil { + return nil, fmt.Errorf("Set-Übung zuordnen: %w", err) + } + } + + if err := tx.Commit(); err != nil { + return nil, fmt.Errorf("Transaktion committen: %w", err) + } + return s.GetSet(id) +} + +// UpdateSet aktualisiert ein Set (Name + Übungszuordnungen). 
+func (s *Store) UpdateSet(id int64, req *model.UpdateSetRequest) (*model.TrainingSet, error) { + tx, err := s.db.Begin() + if err != nil { + return nil, fmt.Errorf("Transaktion starten: %w", err) + } + defer tx.Rollback() + + // Prüfen ob Set existiert + var exists bool + err = tx.QueryRow(`SELECT EXISTS(SELECT 1 FROM training_sets WHERE id = ? AND deleted_at IS NULL)`, id).Scan(&exists) + if err != nil { + return nil, fmt.Errorf("Set prüfen: %w", err) + } + if !exists { + return nil, nil + } + + // Prüfen ob alle Übungen existieren + for _, eid := range req.ExerciseIDs { + var eExists bool + err := tx.QueryRow(`SELECT EXISTS(SELECT 1 FROM exercises WHERE id = ? AND deleted_at IS NULL)`, eid).Scan(&eExists) + if err != nil { + return nil, fmt.Errorf("Übung prüfen: %w", err) + } + if !eExists { + return nil, fmt.Errorf("Übung %d existiert nicht", eid) + } + } + + _, err = tx.Exec(`UPDATE training_sets SET name = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?`, req.Name, id) + if err != nil { + return nil, fmt.Errorf("Set aktualisieren: %w", err) + } + + _, err = tx.Exec(`DELETE FROM set_exercises WHERE set_id = ?`, id) + if err != nil { + return nil, fmt.Errorf("Set-Übungen löschen: %w", err) + } + + for pos, eid := range req.ExerciseIDs { + _, err := tx.Exec(`INSERT INTO set_exercises (set_id, exercise_id, position) VALUES (?, ?, ?)`, id, eid, pos) + if err != nil { + return nil, fmt.Errorf("Set-Übung zuordnen: %w", err) + } + } + + if err := tx.Commit(); err != nil { + return nil, fmt.Errorf("Transaktion committen: %w", err) + } + return s.GetSet(id) +} + +// SoftDeleteSet markiert ein Set als gelöscht. +func (s *Store) SoftDeleteSet(id int64) error { + result, err := s.db.Exec(` + UPDATE training_sets SET deleted_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP + WHERE id = ? 
AND deleted_at IS NULL`, id, + ) + if err != nil { + return fmt.Errorf("Set löschen: %w", err) + } + rows, _ := result.RowsAffected() + if rows == 0 { + return sql.ErrNoRows + } + return nil +} + +// getSetExercises lädt die Übungen eines Sets sortiert nach Position. +func (s *Store) getSetExercises(setID int64) ([]model.Exercise, error) { + rows, err := s.db.Query(` + SELECT e.id, e.name, e.description, e.muscle_group, e.weight_step_kg, e.created_at, e.updated_at + FROM exercises e + JOIN set_exercises se ON se.exercise_id = e.id + WHERE se.set_id = ? + ORDER BY se.position`, setID, + ) + if err != nil { + return nil, fmt.Errorf("Set-Übungen abfragen: %w", err) + } + defer rows.Close() + + var exercises []model.Exercise + for rows.Next() { + var e model.Exercise + if err := rows.Scan(&e.ID, &e.Name, &e.Description, &e.MuscleGroup, &e.WeightStepKg, &e.CreatedAt, &e.UpdatedAt); err != nil { + return nil, fmt.Errorf("Übung scannen: %w", err) + } + exercises = append(exercises, e) + } + if exercises == nil { + exercises = []model.Exercise{} + } + return exercises, rows.Err() +} diff --git a/backend/internal/store/stats_store.go b/backend/internal/store/stats_store.go new file mode 100755 index 0000000..9a14baa --- /dev/null +++ b/backend/internal/store/stats_store.go @@ -0,0 +1,85 @@ +package store + +import ( + "fmt" + "krafttrainer/internal/model" +) + +// StatsOverview enthält die Gesamtübersicht. +type StatsOverview struct { + TotalSessions int `json:"total_sessions"` + TotalVolumeKg float64 `json:"total_volume_kg"` + SessionsThisWeek int `json:"sessions_this_week"` + Exercises []model.ExerciseStats `json:"exercises"` +} + +// GetStatsOverview gibt die Gesamtstatistik zurück. 
+func (s *Store) GetStatsOverview() (*StatsOverview, error) { + var overview StatsOverview + + err := s.db.QueryRow(` + SELECT + (SELECT COUNT(*) FROM sessions WHERE ended_at IS NOT NULL), + (SELECT COALESCE(SUM(weight_kg * reps), 0) FROM session_logs), + (SELECT COUNT(*) FROM sessions WHERE ended_at IS NOT NULL AND started_at >= date('now', '-7 days')) + `).Scan(&overview.TotalSessions, &overview.TotalVolumeKg, &overview.SessionsThisWeek) + if err != nil { + return nil, fmt.Errorf("Übersicht abfragen: %w", err) + } + + rows, err := s.db.Query(` + SELECT + sl.exercise_id, + sl.exercise_name, + MAX(sl.weight_kg) as max_weight_kg, + SUM(sl.weight_kg * sl.reps) as total_volume_kg, + COUNT(*) as total_sets, + MAX(sl.logged_at) as last_trained + FROM session_logs sl + GROUP BY sl.exercise_id + ORDER BY last_trained DESC`) + if err != nil { + return nil, fmt.Errorf("Übungs-Stats abfragen: %w", err) + } + defer rows.Close() + + for rows.Next() { + var es model.ExerciseStats + if err := rows.Scan(&es.ExerciseID, &es.ExerciseName, &es.MaxWeightKg, &es.TotalVolumeKg, &es.TotalSets, &es.LastTrained); err != nil { + return nil, fmt.Errorf("Übungs-Stats scannen: %w", err) + } + overview.Exercises = append(overview.Exercises, es) + } + if overview.Exercises == nil { + overview.Exercises = []model.ExerciseStats{} + } + return &overview, rows.Err() +} + +// GetExerciseHistory gibt die letzten N Logs einer Übung zurück. +func (s *Store) GetExerciseHistory(exerciseID int64, limit int) ([]model.SessionLog, error) { + rows, err := s.db.Query(` + SELECT id, session_id, exercise_id, exercise_name, set_number, weight_kg, reps, note, logged_at + FROM session_logs + WHERE exercise_id = ? 
+ ORDER BY logged_at DESC + LIMIT ?`, exerciseID, limit, + ) + if err != nil { + return nil, fmt.Errorf("Übungshistorie abfragen: %w", err) + } + defer rows.Close() + + var logs []model.SessionLog + for rows.Next() { + var log model.SessionLog + if err := rows.Scan(&log.ID, &log.SessionID, &log.ExerciseID, &log.ExerciseName, &log.SetNumber, &log.WeightKg, &log.Reps, &log.Note, &log.LoggedAt); err != nil { + return nil, fmt.Errorf("Log scannen: %w", err) + } + logs = append(logs, log) + } + if logs == nil { + logs = []model.SessionLog{} + } + return logs, rows.Err() +} diff --git a/backend/internal/store/store.go b/backend/internal/store/store.go new file mode 100755 index 0000000..d8c5eb9 --- /dev/null +++ b/backend/internal/store/store.go @@ -0,0 +1,37 @@ +package store + +import ( + "database/sql" + "fmt" + + _ "github.com/mattn/go-sqlite3" +) + +// Store kapselt den Datenbankzugriff. +type Store struct { + db *sql.DB +} + +// New erstellt einen neuen Store und konfiguriert SQLite. +func New(dbPath string) (*Store, error) { + db, err := sql.Open("sqlite3", dbPath+"?_journal_mode=WAL&_foreign_keys=ON") + if err != nil { + return nil, fmt.Errorf("Datenbank öffnen: %w", err) + } + + if err := db.Ping(); err != nil { + return nil, fmt.Errorf("Datenbank-Verbindung prüfen: %w", err) + } + + return &Store{db: db}, nil +} + +// DB gibt die zugrundeliegende Datenbankverbindung zurück (für Migrations). +func (s *Store) DB() *sql.DB { + return s.db +} + +// Close schliesst die Datenbankverbindung. 
+func (s *Store) Close() error { + return s.db.Close() +} diff --git a/backend/migrations/001_create_exercises.down.sql b/backend/migrations/001_create_exercises.down.sql new file mode 100755 index 0000000..2eff629 --- /dev/null +++ b/backend/migrations/001_create_exercises.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS exercises; diff --git a/backend/migrations/001_create_exercises.up.sql b/backend/migrations/001_create_exercises.up.sql new file mode 100755 index 0000000..4a814f1 --- /dev/null +++ b/backend/migrations/001_create_exercises.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE exercises ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL CHECK(length(name) >= 1 AND length(name) <= 100), + description TEXT DEFAULT '', + muscle_group TEXT NOT NULL CHECK(muscle_group IN ( + 'brust', 'ruecken', 'schultern', 'bizeps', 'trizeps', + 'beine', 'bauch', 'ganzkoerper', 'sonstiges' + )), + weight_step_kg REAL NOT NULL DEFAULT 2.5 CHECK(weight_step_kg > 0), + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at DATETIME +); + +CREATE INDEX idx_exercises_muscle_group ON exercises(muscle_group) WHERE deleted_at IS NULL; +CREATE INDEX idx_exercises_deleted_at ON exercises(deleted_at); diff --git a/backend/migrations/002_create_training_sets.down.sql b/backend/migrations/002_create_training_sets.down.sql new file mode 100755 index 0000000..7079e06 --- /dev/null +++ b/backend/migrations/002_create_training_sets.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS set_exercises; +DROP TABLE IF EXISTS training_sets; diff --git a/backend/migrations/002_create_training_sets.up.sql b/backend/migrations/002_create_training_sets.up.sql new file mode 100755 index 0000000..17155d7 --- /dev/null +++ b/backend/migrations/002_create_training_sets.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE training_sets ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL CHECK(length(name) >= 1 AND length(name) <= 100), + 
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted_at DATETIME +); + +CREATE TABLE set_exercises ( + set_id INTEGER NOT NULL REFERENCES training_sets(id) ON DELETE CASCADE, + exercise_id INTEGER NOT NULL REFERENCES exercises(id), + position INTEGER NOT NULL CHECK(position >= 0), + PRIMARY KEY (set_id, exercise_id) +); + +CREATE INDEX idx_set_exercises_set_id ON set_exercises(set_id); diff --git a/backend/migrations/003_create_sessions.down.sql b/backend/migrations/003_create_sessions.down.sql new file mode 100755 index 0000000..297f1b5 --- /dev/null +++ b/backend/migrations/003_create_sessions.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS session_logs; +DROP TABLE IF EXISTS sessions; diff --git a/backend/migrations/003_create_sessions.up.sql b/backend/migrations/003_create_sessions.up.sql new file mode 100755 index 0000000..0270522 --- /dev/null +++ b/backend/migrations/003_create_sessions.up.sql @@ -0,0 +1,25 @@ +CREATE TABLE sessions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + set_id INTEGER NOT NULL REFERENCES training_sets(id), + started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + ended_at DATETIME, + note TEXT DEFAULT '' +); + +CREATE TABLE session_logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + session_id INTEGER NOT NULL REFERENCES sessions(id) ON DELETE CASCADE, + exercise_id INTEGER NOT NULL REFERENCES exercises(id), + exercise_name TEXT NOT NULL, + set_number INTEGER NOT NULL CHECK(set_number >= 1), + weight_kg REAL NOT NULL CHECK(weight_kg >= 0 AND weight_kg <= 999), + reps INTEGER NOT NULL CHECK(reps >= 0 AND reps <= 999), + note TEXT DEFAULT '', + logged_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + UNIQUE(session_id, exercise_id, set_number) +); + +CREATE INDEX idx_sessions_set_id ON sessions(set_id); +CREATE INDEX idx_sessions_started_at ON sessions(started_at); +CREATE INDEX idx_session_logs_session_id ON session_logs(session_id); +CREATE INDEX 
idx_session_logs_exercise_id ON session_logs(exercise_id); diff --git a/backend/migrations/embed.go b/backend/migrations/embed.go new file mode 100755 index 0000000..4f95b7b --- /dev/null +++ b/backend/migrations/embed.go @@ -0,0 +1,8 @@ +package migrations + +import "embed" + +// FS enthält die eingebetteten SQL-Migrations-Dateien. +// +//go:embed *.sql +var FS embed.FS diff --git a/backend/static/.gitkeep b/backend/static/.gitkeep new file mode 100755 index 0000000..e69de29 diff --git a/backend/static/embed.go b/backend/static/embed.go new file mode 100755 index 0000000..1c7e588 --- /dev/null +++ b/backend/static/embed.go @@ -0,0 +1,9 @@ +package static + +import "embed" + +// FS enthält die eingebetteten Frontend-Dateien (nach vite build). +// In Entwicklung ist dieser FS leer (nur .gitkeep). +// +//go:embed all:* +var FS embed.FS diff --git a/frontend/index.html b/frontend/index.html new file mode 100755 index 0000000..2fa1639 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,12 @@ + + + + + + Krafttrainer + + +
+ + + diff --git a/frontend/package.json b/frontend/package.json new file mode 100755 index 0000000..b46ab6c --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,33 @@ +{ + "name": "frontend", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "dev": "vite", + "build": "tsc && vite build", + "preview": "vite preview" + }, + "keywords": [], + "author": "", + "license": "ISC", + "packageManager": "pnpm@10.32.1", + "devDependencies": { + "@tailwindcss/vite": "^4.2.2", + "@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^6.0.1", + "autoprefixer": "^10.4.27", + "postcss": "^8.5.8", + "tailwindcss": "^4.2.2", + "typescript": "^5.9.3", + "vite": "^8.0.1" + }, + "dependencies": { + "react": "^19.2.4", + "react-dom": "^19.2.4", + "react-router-dom": "^7.13.1", + "recharts": "^3.8.0", + "zustand": "^5.0.12" + } +} diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml new file mode 100755 index 0000000..d27b7a9 --- /dev/null +++ b/frontend/pnpm-lock.yaml @@ -0,0 +1,1299 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + react: + specifier: ^19.2.4 + version: 19.2.4 + react-dom: + specifier: ^19.2.4 + version: 19.2.4(react@19.2.4) + react-router-dom: + specifier: ^7.13.1 + version: 7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + recharts: + specifier: ^3.8.0 + version: 3.8.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react-is@19.2.4)(react@19.2.4)(redux@5.0.1) + zustand: + specifier: ^5.0.12 + version: 5.0.12(@types/react@19.2.14)(immer@11.1.4)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)) + devDependencies: + '@tailwindcss/vite': + specifier: ^4.2.2 + version: 4.2.2(vite@8.0.1(jiti@2.6.1)) + '@types/react': + specifier: ^19.2.14 + version: 19.2.14 + '@types/react-dom': + specifier: ^19.2.3 + version: 19.2.3(@types/react@19.2.14) + '@vitejs/plugin-react': + specifier: ^6.0.1 + 
version: 6.0.1(vite@8.0.1(jiti@2.6.1)) + autoprefixer: + specifier: ^10.4.27 + version: 10.4.27(postcss@8.5.8) + postcss: + specifier: ^8.5.8 + version: 8.5.8 + tailwindcss: + specifier: ^4.2.2 + version: 4.2.2 + typescript: + specifier: ^5.9.3 + version: 5.9.3 + vite: + specifier: ^8.0.1 + version: 8.0.1(jiti@2.6.1) + +packages: + + '@emnapi/core@1.9.1': + resolution: {integrity: sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA==} + + '@emnapi/runtime@1.9.1': + resolution: {integrity: sha512-VYi5+ZVLhpgK4hQ0TAjiQiZ6ol0oe4mBx7mVv7IflsiEp0OWoVsp/+f9Vc1hOhE0TtkORVrI1GvzyreqpgWtkA==} + + '@emnapi/wasi-threads@1.2.0': + resolution: {integrity: sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/remapping@2.3.5': + resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + + '@napi-rs/wasm-runtime@1.1.1': + resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==} + + '@oxc-project/types@0.120.0': + resolution: {integrity: sha512-k1YNu55DuvAip/MGE1FTsIuU3FUCn6v/ujG9V7Nq5Df/kX2CWb13hhwD0lmJGMGqE+bE1MXvv9SZVnMzEXlWcg==} + + '@reduxjs/toolkit@2.11.2': + resolution: {integrity: 
sha512-Kd6kAHTA6/nUpp8mySPqj3en3dm0tdMIgbttnQ1xFMVpufoj+ADi8pXLBsd4xzTRHQa7t/Jv8W5UnCuW4kuWMQ==} + peerDependencies: + react: ^16.9.0 || ^17.0.0 || ^18 || ^19 + react-redux: ^7.2.1 || ^8.1.3 || ^9.0.0 + peerDependenciesMeta: + react: + optional: true + react-redux: + optional: true + + '@rolldown/binding-android-arm64@1.0.0-rc.10': + resolution: {integrity: sha512-jOHxwXhxmFKuXztiu1ORieJeTbx5vrTkcOkkkn2d35726+iwhrY1w/+nYY/AGgF12thg33qC3R1LMBF5tHTZHg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [android] + + '@rolldown/binding-darwin-arm64@1.0.0-rc.10': + resolution: {integrity: sha512-gED05Teg/vtTZbIJBc4VNMAxAFDUPkuO/rAIyyxZjTj1a1/s6z5TII/5yMGZ0uLRCifEtwUQn8OlYzuYc0m70w==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [darwin] + + '@rolldown/binding-darwin-x64@1.0.0-rc.10': + resolution: {integrity: sha512-rI15NcM1mA48lqrIxVkHfAqcyFLcQwyXWThy+BQ5+mkKKPvSO26ir+ZDp36AgYoYVkqvMcdS8zOE6SeBsR9e8A==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [darwin] + + '@rolldown/binding-freebsd-x64@1.0.0-rc.10': + resolution: {integrity: sha512-XZRXHdTa+4ME1MuDVp021+doQ+z6Ei4CCFmNc5/sKbqb8YmkiJdj8QKlV3rCI0AJtAeSB5n0WGPuJWNL9p/L2w==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [freebsd] + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.10': + resolution: {integrity: sha512-R0SQMRluISSLzFE20sPWYHVmJdDQnRyc/FzSCN72BqQmh2SOZUFG+N3/vBZpR4C6WpEUVYJLrYUXaj43sJsNLA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm] + os: [linux] + + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.10': + resolution: {integrity: sha512-Y1reMrV/o+cwpduYhJuOE3OMKx32RMYCidf14y+HssARRmhDuWXJ4yVguDg2R/8SyyGNo+auzz64LnPK9Hq6jg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.10': + resolution: {integrity: sha512-vELN+HNb2IzuzSBUOD4NHmP9yrGwl1DVM29wlQvx1OLSclL0NgVWnVDKl/8tEks79EFek/kebQKnNJkIAA4W2g==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + 
os: [linux] + libc: [musl] + + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.10': + resolution: {integrity: sha512-ZqrufYTgzxbHwpqOjzSsb0UV/aV2TFIY5rP8HdsiPTv/CuAgCRjM6s9cYFwQ4CNH+hf9Y4erHW1GjZuZ7WoI7w==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.10': + resolution: {integrity: sha512-gSlmVS1FZJSRicA6IyjoRoKAFK7IIHBs7xJuHRSmjImqk3mPPWbR7RhbnfH2G6bcmMEllCt2vQ/7u9e6bBnByg==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.10': + resolution: {integrity: sha512-eOCKUpluKgfObT2pHjztnaWEIbUabWzk3qPZ5PuacuPmr4+JtQG4k2vGTY0H15edaTnicgU428XW/IH6AimcQw==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@rolldown/binding-linux-x64-musl@1.0.0-rc.10': + resolution: {integrity: sha512-Xdf2jQbfQowJnLcgYfD/m0Uu0Qj5OdxKallD78/IPPfzaiaI4KRAwZzHcKQ4ig1gtg1SuzC7jovNiM2TzQsBXA==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [x64] + os: [linux] + libc: [musl] + + '@rolldown/binding-openharmony-arm64@1.0.0-rc.10': + resolution: {integrity: sha512-o1hYe8hLi1EY6jgPFyxQgQ1wcycX+qz8eEbVmot2hFkgUzPxy9+kF0u0NIQBeDq+Mko47AkaFFaChcvZa9UX9Q==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [openharmony] + + '@rolldown/binding-wasm32-wasi@1.0.0-rc.10': + resolution: {integrity: sha512-Ugv9o7qYJudqQO5Y5y2N2SOo6S4WiqiNOpuQyoPInnhVzCY+wi/GHltcLHypG9DEUYMB0iTB/huJrpadiAcNcA==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.10': + resolution: {integrity: sha512-7UODQb4fQUNT/vmgDZBl3XOBAIOutP5R3O/rkxg0aLfEGQ4opbCgU5vOw/scPe4xOqBwL9fw7/RP1vAMZ6QlAQ==} + engines: {node: ^20.19.0 || >=22.12.0} + cpu: [arm64] + os: [win32] + + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.10': + resolution: {integrity: sha512-PYxKHMVHOb5NJuDL53vBUl1VwUjymDcYI6rzpIni0C9+9mTiJedvUxSk7/RPp7OOAm3v+EjgMu9bIy3N6b408w==} + engines: {node: ^20.19.0 || 
>=22.12.0} + cpu: [x64] + os: [win32] + + '@rolldown/pluginutils@1.0.0-rc.10': + resolution: {integrity: sha512-UkVDEFk1w3mveXeKgaTuYfKWtPbvgck1dT8TUG3bnccrH0XtLTuAyfCoks4Q/M5ZGToSVJTIQYCzy2g/atAOeg==} + + '@rolldown/pluginutils@1.0.0-rc.7': + resolution: {integrity: sha512-qujRfC8sFVInYSPPMLQByRh7zhwkGFS4+tyMQ83srV1qrxL4g8E2tyxVVyxd0+8QeBM1mIk9KbWxkegRr76XzA==} + + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + + '@standard-schema/utils@0.3.0': + resolution: {integrity: sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==} + + '@tailwindcss/node@4.2.2': + resolution: {integrity: sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==} + + '@tailwindcss/oxide-android-arm64@4.2.2': + resolution: {integrity: sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [android] + + '@tailwindcss/oxide-darwin-arm64@4.2.2': + resolution: {integrity: sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [darwin] + + '@tailwindcss/oxide-darwin-x64@4.2.2': + resolution: {integrity: sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==} + engines: {node: '>= 20'} + cpu: [x64] + os: [darwin] + + '@tailwindcss/oxide-freebsd-x64@4.2.2': + resolution: {integrity: sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [freebsd] + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2': + resolution: {integrity: sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==} + engines: {node: '>= 20'} + cpu: [arm] + os: [linux] + + 
'@tailwindcss/oxide-linux-arm64-gnu@4.2.2': + resolution: {integrity: sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-arm64-musl@4.2.2': + resolution: {integrity: sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-linux-x64-gnu@4.2.2': + resolution: {integrity: sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-x64-musl@4.2.2': + resolution: {integrity: sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-wasm32-wasi@4.2.2': + resolution: {integrity: sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + bundledDependencies: + - '@napi-rs/wasm-runtime' + - '@emnapi/core' + - '@emnapi/runtime' + - '@tybys/wasm-util' + - '@emnapi/wasi-threads' + - tslib + + '@tailwindcss/oxide-win32-arm64-msvc@4.2.2': + resolution: {integrity: sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [win32] + + '@tailwindcss/oxide-win32-x64-msvc@4.2.2': + resolution: {integrity: sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==} + engines: {node: '>= 20'} + cpu: [x64] + os: [win32] + + '@tailwindcss/oxide@4.2.2': + resolution: {integrity: sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==} + engines: {node: '>= 20'} + + '@tailwindcss/vite@4.2.2': + 
resolution: {integrity: sha512-mEiF5HO1QqCLXoNEfXVA1Tzo+cYsrqV7w9Juj2wdUFyW07JRenqMG225MvPwr3ZD9N1bFQj46X7r33iHxLUW0w==} + peerDependencies: + vite: ^5.2.0 || ^6 || ^7 || ^8 + + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + + '@types/d3-array@3.2.2': + resolution: {integrity: sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==} + + '@types/d3-color@3.1.3': + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + + '@types/d3-ease@3.0.2': + resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==} + + '@types/d3-interpolate@3.0.4': + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + + '@types/d3-path@3.1.1': + resolution: {integrity: sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==} + + '@types/d3-scale@4.0.9': + resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==} + + '@types/d3-shape@3.1.8': + resolution: {integrity: sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==} + + '@types/d3-time@3.0.4': + resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==} + + '@types/d3-timer@3.0.2': + resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==} + + '@types/react-dom@19.2.3': + resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} + peerDependencies: + '@types/react': ^19.2.0 + + '@types/react@19.2.14': + resolution: {integrity: 
sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==} + + '@types/use-sync-external-store@0.0.6': + resolution: {integrity: sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==} + + '@vitejs/plugin-react@6.0.1': + resolution: {integrity: sha512-l9X/E3cDb+xY3SWzlG1MOGt2usfEHGMNIaegaUGFsLkb3RCn/k8/TOXBcab+OndDI4TBtktT8/9BwwW8Vi9KUQ==} + engines: {node: ^20.19.0 || >=22.12.0} + peerDependencies: + '@rolldown/plugin-babel': ^0.1.7 || ^0.2.0 + babel-plugin-react-compiler: ^1.0.0 + vite: ^8.0.0 + peerDependenciesMeta: + '@rolldown/plugin-babel': + optional: true + babel-plugin-react-compiler: + optional: true + + autoprefixer@10.4.27: + resolution: {integrity: sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==} + engines: {node: ^10 || ^12 || >=14} + hasBin: true + peerDependencies: + postcss: ^8.1.0 + + baseline-browser-mapping@2.10.9: + resolution: {integrity: sha512-OZd0e2mU11ClX8+IdXe3r0dbqMEznRiT4TfbhYIbcRPZkqJ7Qwer8ij3GZAmLsRKa+II9V1v5czCkvmHH3XZBg==} + engines: {node: '>=6.0.0'} + hasBin: true + + browserslist@4.28.1: + resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + + caniuse-lite@1.0.30001780: + resolution: {integrity: sha512-llngX0E7nQci5BPJDqoZSbuZ5Bcs9F5db7EtgfwBerX9XGtkkiO4NwfDDIRzHTTwcYC8vC7bmeUEPGrKlR/TkQ==} + + clsx@2.1.1: + resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} + engines: {node: '>=6'} + + cookie@1.1.1: + resolution: {integrity: sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==} + engines: {node: '>=18'} + + csstype@3.2.3: + resolution: {integrity: 
sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + + d3-array@3.2.4: + resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} + engines: {node: '>=12'} + + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-format@3.1.2: + resolution: {integrity: sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + d3-path@3.1.0: + resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} + engines: {node: '>=12'} + + d3-scale@4.0.2: + resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} + engines: {node: '>=12'} + + d3-shape@3.2.0: + resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} + engines: {node: '>=12'} + + d3-time-format@4.1.0: + resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} + engines: {node: '>=12'} + + d3-time@3.1.0: + resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + decimal.js-light@2.5.1: + 
resolution: {integrity: sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==} + + detect-libc@2.1.2: + resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} + engines: {node: '>=8'} + + electron-to-chromium@1.5.321: + resolution: {integrity: sha512-L2C7Q279W2D/J4PLZLk7sebOILDSWos7bMsMNN06rK482umHUrh/3lM8G7IlHFOYip2oAg5nha1rCMxr/rs6ZQ==} + + enhanced-resolve@5.20.1: + resolution: {integrity: sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==} + engines: {node: '>=10.13.0'} + + es-toolkit@1.45.1: + resolution: {integrity: sha512-/jhoOj/Fx+A+IIyDNOvO3TItGmlMKhtX8ISAHKE90c4b/k1tqaqEZ+uUqfpU8DMnW5cgNJv606zS55jGvza0Xw==} + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + eventemitter3@5.0.4: + resolution: {integrity: sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==} + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + fraction.js@5.3.4: + resolution: {integrity: sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + immer@10.2.0: + resolution: {integrity: 
sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw==} + + immer@11.1.4: + resolution: {integrity: sha512-XREFCPo6ksxVzP4E0ekD5aMdf8WMwmdNaz6vuvxgI40UaEiu6q3p8X52aU6GdyvLY3XXX/8R7JOTXStz/nBbRw==} + + internmap@2.0.3: + resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} + engines: {node: '>=12'} + + jiti@2.6.1: + resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} + hasBin: true + + lightningcss-android-arm64@1.32.0: + resolution: {integrity: sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.32.0: + resolution: {integrity: sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.32.0: + resolution: {integrity: sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.32.0: + resolution: {integrity: sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.32.0: + resolution: {integrity: sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.32.0: + resolution: {integrity: sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + lightningcss-linux-arm64-musl@1.32.0: + resolution: {integrity: 
sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [musl] + + lightningcss-linux-x64-gnu@1.32.0: + resolution: {integrity: sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [glibc] + + lightningcss-linux-x64-musl@1.32.0: + resolution: {integrity: sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [musl] + + lightningcss-win32-arm64-msvc@1.32.0: + resolution: {integrity: sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.32.0: + resolution: {integrity: sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + + lightningcss@1.32.0: + resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} + engines: {node: '>= 12.0.0'} + + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + node-releases@2.0.36: + resolution: {integrity: sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@4.0.3: + resolution: {integrity: 
sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + + postcss-value-parser@4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + + postcss@8.5.8: + resolution: {integrity: sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==} + engines: {node: ^10 || ^12 || >=14} + + react-dom@19.2.4: + resolution: {integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==} + peerDependencies: + react: ^19.2.4 + + react-is@19.2.4: + resolution: {integrity: sha512-W+EWGn2v0ApPKgKKCy/7s7WHXkboGcsrXE+2joLyVxkbyVQfO3MUEaUQDHoSmb8TFFrSKYa9mw64WZHNHSDzYA==} + + react-redux@9.2.0: + resolution: {integrity: sha512-ROY9fvHhwOD9ySfrF0wmvu//bKCQ6AeZZq1nJNtbDC+kk5DuSuNX/n6YWYF/SYy7bSba4D4FSz8DJeKY/S/r+g==} + peerDependencies: + '@types/react': ^18.2.25 || ^19 + react: ^18.0 || ^19 + redux: ^5.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + redux: + optional: true + + react-router-dom@7.13.1: + resolution: {integrity: sha512-UJnV3Rxc5TgUPJt2KJpo1Jpy0OKQr0AjgbZzBFjaPJcFOb2Y8jA5H3LT8HUJAiRLlWrEXWHbF1Z4SCZaQjWDHw==} + engines: {node: '>=20.0.0'} + peerDependencies: + react: '>=18' + react-dom: '>=18' + + react-router@7.13.1: + resolution: {integrity: sha512-td+xP4X2/6BJvZoX6xw++A2DdEi++YypA69bJUV5oVvqf6/9/9nNlD70YO1e9d3MyamJEBQFEzk6mbfDYbqrSA==} + engines: {node: '>=20.0.0'} + peerDependencies: + react: '>=18' + react-dom: '>=18' + peerDependenciesMeta: + react-dom: + optional: true + + react@19.2.4: + resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} + engines: {node: '>=0.10.0'} + + recharts@3.8.0: + resolution: {integrity: sha512-Z/m38DX3L73ExO4Tpc9/iZWHmHnlzWG4njQbxsF5aSjwqmHNDDIm0rdEBArkwsBvR8U6EirlEHiQNYWCVh9sGQ==} + engines: {node: '>=18'} + peerDependencies: + 
react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-is: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + redux-thunk@3.1.0: + resolution: {integrity: sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw==} + peerDependencies: + redux: ^5.0.0 + + redux@5.0.1: + resolution: {integrity: sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==} + + reselect@5.1.1: + resolution: {integrity: sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==} + + rolldown@1.0.0-rc.10: + resolution: {integrity: sha512-q7j6vvarRFmKpgJUT8HCAUljkgzEp4LAhPlJUvQhA5LA1SUL36s5QCysMutErzL3EbNOZOkoziSx9iZC4FddKA==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + + scheduler@0.27.0: + resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==} + + set-cookie-parser@2.7.2: + resolution: {integrity: sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + tailwindcss@4.2.2: + resolution: {integrity: sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==} + + tapable@2.3.0: + resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} + engines: {node: '>=6'} + + tiny-invariant@1.3.3: + resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} + + tinyglobby@0.2.15: + resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} + engines: {node: '>=12.0.0'} + + tslib@2.8.1: + resolution: {integrity: 
sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + + update-browserslist-db@1.2.3: + resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + use-sync-external-store@1.6.0: + resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + victory-vendor@37.3.6: + resolution: {integrity: sha512-SbPDPdDBYp+5MJHhBCAyI7wKM3d5ivekigc2Dk2s7pgbZ9wIgIBYGVw4zGHBml/qTFbexrofXW6Gu4noGxrOwQ==} + + vite@8.0.1: + resolution: {integrity: sha512-wt+Z2qIhfFt85uiyRt5LPU4oVEJBXj8hZNWKeqFG4gRG/0RaRGJ7njQCwzFVjO+v4+Ipmf5CY7VdmZRAYYBPHw==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + peerDependencies: + '@types/node': ^20.19.0 || >=22.12.0 + '@vitejs/devtools': ^0.1.0 + esbuild: ^0.27.0 + jiti: '>=1.21.0' + less: ^4.0.0 + sass: ^1.70.0 + sass-embedded: ^1.70.0 + stylus: '>=0.54.8' + sugarss: ^5.0.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + '@vitejs/devtools': + optional: true + esbuild: + optional: true + jiti: + optional: true + less: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + + zustand@5.0.12: + resolution: {integrity: sha512-i77ae3aZq4dhMlRhJVCYgMLKuSiZAaUPAct2AksxQ+gOtimhGMdXljRT21P5BNpeT4kXlLIckvkPM029OljD7g==} + engines: {node: '>=12.20.0'} + peerDependencies: + '@types/react': '>=18.0.0' + immer: '>=9.0.6' + react: '>=18.0.0' + 
use-sync-external-store: '>=1.2.0' + peerDependenciesMeta: + '@types/react': + optional: true + immer: + optional: true + react: + optional: true + use-sync-external-store: + optional: true + +snapshots: + + '@emnapi/core@1.9.1': + dependencies: + '@emnapi/wasi-threads': 1.2.0 + tslib: 2.8.1 + optional: true + + '@emnapi/runtime@1.9.1': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.2.0': + dependencies: + tslib: 2.8.1 + optional: true + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/remapping@2.3.5': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@napi-rs/wasm-runtime@1.1.1': + dependencies: + '@emnapi/core': 1.9.1 + '@emnapi/runtime': 1.9.1 + '@tybys/wasm-util': 0.10.1 + optional: true + + '@oxc-project/types@0.120.0': {} + + '@reduxjs/toolkit@2.11.2(react-redux@9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1))(react@19.2.4)': + dependencies: + '@standard-schema/spec': 1.1.0 + '@standard-schema/utils': 0.3.0 + immer: 11.1.4 + redux: 5.0.1 + redux-thunk: 3.1.0(redux@5.0.1) + reselect: 5.1.1 + optionalDependencies: + react: 19.2.4 + react-redux: 9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1) + + '@rolldown/binding-android-arm64@1.0.0-rc.10': + optional: true + + '@rolldown/binding-darwin-arm64@1.0.0-rc.10': + optional: true + + '@rolldown/binding-darwin-x64@1.0.0-rc.10': + optional: true + + '@rolldown/binding-freebsd-x64@1.0.0-rc.10': + optional: true + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.10': + optional: true + + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.10': + optional: true + + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.10': + optional: true + + 
'@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.10': + optional: true + + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.10': + optional: true + + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.10': + optional: true + + '@rolldown/binding-linux-x64-musl@1.0.0-rc.10': + optional: true + + '@rolldown/binding-openharmony-arm64@1.0.0-rc.10': + optional: true + + '@rolldown/binding-wasm32-wasi@1.0.0-rc.10': + dependencies: + '@napi-rs/wasm-runtime': 1.1.1 + optional: true + + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.10': + optional: true + + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.10': + optional: true + + '@rolldown/pluginutils@1.0.0-rc.10': {} + + '@rolldown/pluginutils@1.0.0-rc.7': {} + + '@standard-schema/spec@1.1.0': {} + + '@standard-schema/utils@0.3.0': {} + + '@tailwindcss/node@4.2.2': + dependencies: + '@jridgewell/remapping': 2.3.5 + enhanced-resolve: 5.20.1 + jiti: 2.6.1 + lightningcss: 1.32.0 + magic-string: 0.30.21 + source-map-js: 1.2.1 + tailwindcss: 4.2.2 + + '@tailwindcss/oxide-android-arm64@4.2.2': + optional: true + + '@tailwindcss/oxide-darwin-arm64@4.2.2': + optional: true + + '@tailwindcss/oxide-darwin-x64@4.2.2': + optional: true + + '@tailwindcss/oxide-freebsd-x64@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-arm64-gnu@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-arm64-musl@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-x64-gnu@4.2.2': + optional: true + + '@tailwindcss/oxide-linux-x64-musl@4.2.2': + optional: true + + '@tailwindcss/oxide-wasm32-wasi@4.2.2': + optional: true + + '@tailwindcss/oxide-win32-arm64-msvc@4.2.2': + optional: true + + '@tailwindcss/oxide-win32-x64-msvc@4.2.2': + optional: true + + '@tailwindcss/oxide@4.2.2': + optionalDependencies: + '@tailwindcss/oxide-android-arm64': 4.2.2 + '@tailwindcss/oxide-darwin-arm64': 4.2.2 + '@tailwindcss/oxide-darwin-x64': 4.2.2 + '@tailwindcss/oxide-freebsd-x64': 4.2.2 + 
'@tailwindcss/oxide-linux-arm-gnueabihf': 4.2.2 + '@tailwindcss/oxide-linux-arm64-gnu': 4.2.2 + '@tailwindcss/oxide-linux-arm64-musl': 4.2.2 + '@tailwindcss/oxide-linux-x64-gnu': 4.2.2 + '@tailwindcss/oxide-linux-x64-musl': 4.2.2 + '@tailwindcss/oxide-wasm32-wasi': 4.2.2 + '@tailwindcss/oxide-win32-arm64-msvc': 4.2.2 + '@tailwindcss/oxide-win32-x64-msvc': 4.2.2 + + '@tailwindcss/vite@4.2.2(vite@8.0.1(jiti@2.6.1))': + dependencies: + '@tailwindcss/node': 4.2.2 + '@tailwindcss/oxide': 4.2.2 + tailwindcss: 4.2.2 + vite: 8.0.1(jiti@2.6.1) + + '@tybys/wasm-util@0.10.1': + dependencies: + tslib: 2.8.1 + optional: true + + '@types/d3-array@3.2.2': {} + + '@types/d3-color@3.1.3': {} + + '@types/d3-ease@3.0.2': {} + + '@types/d3-interpolate@3.0.4': + dependencies: + '@types/d3-color': 3.1.3 + + '@types/d3-path@3.1.1': {} + + '@types/d3-scale@4.0.9': + dependencies: + '@types/d3-time': 3.0.4 + + '@types/d3-shape@3.1.8': + dependencies: + '@types/d3-path': 3.1.1 + + '@types/d3-time@3.0.4': {} + + '@types/d3-timer@3.0.2': {} + + '@types/react-dom@19.2.3(@types/react@19.2.14)': + dependencies: + '@types/react': 19.2.14 + + '@types/react@19.2.14': + dependencies: + csstype: 3.2.3 + + '@types/use-sync-external-store@0.0.6': {} + + '@vitejs/plugin-react@6.0.1(vite@8.0.1(jiti@2.6.1))': + dependencies: + '@rolldown/pluginutils': 1.0.0-rc.7 + vite: 8.0.1(jiti@2.6.1) + + autoprefixer@10.4.27(postcss@8.5.8): + dependencies: + browserslist: 4.28.1 + caniuse-lite: 1.0.30001780 + fraction.js: 5.3.4 + picocolors: 1.1.1 + postcss: 8.5.8 + postcss-value-parser: 4.2.0 + + baseline-browser-mapping@2.10.9: {} + + browserslist@4.28.1: + dependencies: + baseline-browser-mapping: 2.10.9 + caniuse-lite: 1.0.30001780 + electron-to-chromium: 1.5.321 + node-releases: 2.0.36 + update-browserslist-db: 1.2.3(browserslist@4.28.1) + + caniuse-lite@1.0.30001780: {} + + clsx@2.1.1: {} + + cookie@1.1.1: {} + + csstype@3.2.3: {} + + d3-array@3.2.4: + dependencies: + internmap: 2.0.3 + + d3-color@3.1.0: {} + + 
d3-ease@3.0.1: {} + + d3-format@3.1.2: {} + + d3-interpolate@3.0.1: + dependencies: + d3-color: 3.1.0 + + d3-path@3.1.0: {} + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.2 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + + d3-timer@3.0.1: {} + + decimal.js-light@2.5.1: {} + + detect-libc@2.1.2: {} + + electron-to-chromium@1.5.321: {} + + enhanced-resolve@5.20.1: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.3.0 + + es-toolkit@1.45.1: {} + + escalade@3.2.0: {} + + eventemitter3@5.0.4: {} + + fdir@6.5.0(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + + fraction.js@5.3.4: {} + + fsevents@2.3.3: + optional: true + + graceful-fs@4.2.11: {} + + immer@10.2.0: {} + + immer@11.1.4: {} + + internmap@2.0.3: {} + + jiti@2.6.1: {} + + lightningcss-android-arm64@1.32.0: + optional: true + + lightningcss-darwin-arm64@1.32.0: + optional: true + + lightningcss-darwin-x64@1.32.0: + optional: true + + lightningcss-freebsd-x64@1.32.0: + optional: true + + lightningcss-linux-arm-gnueabihf@1.32.0: + optional: true + + lightningcss-linux-arm64-gnu@1.32.0: + optional: true + + lightningcss-linux-arm64-musl@1.32.0: + optional: true + + lightningcss-linux-x64-gnu@1.32.0: + optional: true + + lightningcss-linux-x64-musl@1.32.0: + optional: true + + lightningcss-win32-arm64-msvc@1.32.0: + optional: true + + lightningcss-win32-x64-msvc@1.32.0: + optional: true + + lightningcss@1.32.0: + dependencies: + detect-libc: 2.1.2 + optionalDependencies: + lightningcss-android-arm64: 1.32.0 + lightningcss-darwin-arm64: 1.32.0 + lightningcss-darwin-x64: 1.32.0 + lightningcss-freebsd-x64: 1.32.0 + lightningcss-linux-arm-gnueabihf: 1.32.0 + lightningcss-linux-arm64-gnu: 1.32.0 + lightningcss-linux-arm64-musl: 1.32.0 + lightningcss-linux-x64-gnu: 1.32.0 + lightningcss-linux-x64-musl: 
1.32.0 + lightningcss-win32-arm64-msvc: 1.32.0 + lightningcss-win32-x64-msvc: 1.32.0 + + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + nanoid@3.3.11: {} + + node-releases@2.0.36: {} + + picocolors@1.1.1: {} + + picomatch@4.0.3: {} + + postcss-value-parser@4.2.0: {} + + postcss@8.5.8: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + react-dom@19.2.4(react@19.2.4): + dependencies: + react: 19.2.4 + scheduler: 0.27.0 + + react-is@19.2.4: {} + + react-redux@9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1): + dependencies: + '@types/use-sync-external-store': 0.0.6 + react: 19.2.4 + use-sync-external-store: 1.6.0(react@19.2.4) + optionalDependencies: + '@types/react': 19.2.14 + redux: 5.0.1 + + react-router-dom@7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-router: 7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + + react-router@7.13.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + cookie: 1.1.1 + react: 19.2.4 + set-cookie-parser: 2.7.2 + optionalDependencies: + react-dom: 19.2.4(react@19.2.4) + + react@19.2.4: {} + + recharts@3.8.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react-is@19.2.4)(react@19.2.4)(redux@5.0.1): + dependencies: + '@reduxjs/toolkit': 2.11.2(react-redux@9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1))(react@19.2.4) + clsx: 2.1.1 + decimal.js-light: 2.5.1 + es-toolkit: 1.45.1 + eventemitter3: 5.0.4 + immer: 10.2.0 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + react-is: 19.2.4 + react-redux: 9.2.0(@types/react@19.2.14)(react@19.2.4)(redux@5.0.1) + reselect: 5.1.1 + tiny-invariant: 1.3.3 + use-sync-external-store: 1.6.0(react@19.2.4) + victory-vendor: 37.3.6 + transitivePeerDependencies: + - '@types/react' + - redux + + redux-thunk@3.1.0(redux@5.0.1): + dependencies: + redux: 5.0.1 + + redux@5.0.1: {} + + reselect@5.1.1: {} + + rolldown@1.0.0-rc.10: + 
dependencies: + '@oxc-project/types': 0.120.0 + '@rolldown/pluginutils': 1.0.0-rc.10 + optionalDependencies: + '@rolldown/binding-android-arm64': 1.0.0-rc.10 + '@rolldown/binding-darwin-arm64': 1.0.0-rc.10 + '@rolldown/binding-darwin-x64': 1.0.0-rc.10 + '@rolldown/binding-freebsd-x64': 1.0.0-rc.10 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.10 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.10 + '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.10 + '@rolldown/binding-linux-ppc64-gnu': 1.0.0-rc.10 + '@rolldown/binding-linux-s390x-gnu': 1.0.0-rc.10 + '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.10 + '@rolldown/binding-linux-x64-musl': 1.0.0-rc.10 + '@rolldown/binding-openharmony-arm64': 1.0.0-rc.10 + '@rolldown/binding-wasm32-wasi': 1.0.0-rc.10 + '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.10 + '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.10 + + scheduler@0.27.0: {} + + set-cookie-parser@2.7.2: {} + + source-map-js@1.2.1: {} + + tailwindcss@4.2.2: {} + + tapable@2.3.0: {} + + tiny-invariant@1.3.3: {} + + tinyglobby@0.2.15: + dependencies: + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + + tslib@2.8.1: + optional: true + + typescript@5.9.3: {} + + update-browserslist-db@1.2.3(browserslist@4.28.1): + dependencies: + browserslist: 4.28.1 + escalade: 3.2.0 + picocolors: 1.1.1 + + use-sync-external-store@1.6.0(react@19.2.4): + dependencies: + react: 19.2.4 + + victory-vendor@37.3.6: + dependencies: + '@types/d3-array': 3.2.2 + '@types/d3-ease': 3.0.2 + '@types/d3-interpolate': 3.0.4 + '@types/d3-scale': 4.0.9 + '@types/d3-shape': 3.1.8 + '@types/d3-time': 3.0.4 + '@types/d3-timer': 3.0.2 + d3-array: 3.2.4 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-scale: 4.0.2 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-timer: 3.0.1 + + vite@8.0.1(jiti@2.6.1): + dependencies: + lightningcss: 1.32.0 + picomatch: 4.0.3 + postcss: 8.5.8 + rolldown: 1.0.0-rc.10 + tinyglobby: 0.2.15 + optionalDependencies: + fsevents: 2.3.3 + jiti: 2.6.1 + + 
zustand@5.0.12(@types/react@19.2.14)(immer@11.1.4)(react@19.2.4)(use-sync-external-store@1.6.0(react@19.2.4)): + optionalDependencies: + '@types/react': 19.2.14 + immer: 11.1.4 + react: 19.2.4 + use-sync-external-store: 1.6.0(react@19.2.4) diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx new file mode 100755 index 0000000..266af0b --- /dev/null +++ b/frontend/src/App.tsx @@ -0,0 +1,22 @@ +import { createBrowserRouter, RouterProvider } from 'react-router-dom'; +import { PageShell } from './components/layout/PageShell'; +import { ExercisesPage } from './pages/ExercisesPage'; +import { SetsPage } from './pages/SetsPage'; +import { TrainingPage } from './pages/TrainingPage'; +import { HistoryPage } from './pages/HistoryPage'; + +const router = createBrowserRouter([ + { + element: , + children: [ + { path: '/', element: }, + { path: '/sets', element: }, + { path: '/training', element: }, + { path: '/history', element: }, + ], + }, +]); + +export function App() { + return ; +} diff --git a/frontend/src/api/client.ts b/frontend/src/api/client.ts new file mode 100755 index 0000000..d976430 --- /dev/null +++ b/frontend/src/api/client.ts @@ -0,0 +1,178 @@ +import type { + Exercise, + TrainingSet, + Session, + SessionLog, + LastLogResponse, + ExerciseStats, + CreateExerciseRequest, + CreateSetRequest, + UpdateSetRequest, + CreateSessionRequest, + CreateLogRequest, + UpdateLogRequest, +} from '../types'; + +export class ApiError extends Error { + constructor( + public status: number, + message: string, + ) { + super(message); + this.name = 'ApiError'; + } +} + +async function request( + url: string, + options?: RequestInit, +): Promise { + const res = await fetch(url, { + headers: { 'Content-Type': 'application/json' }, + ...options, + }); + + if (res.status === 204) { + return undefined as T; + } + + const data = await res.json(); + + if (!res.ok) { + throw new ApiError(res.status, data.error || 'Unbekannter Fehler'); + } + + return data as T; +} + +export const api = 
{ + exercises: { + list(muscleGroup?: string, q?: string): Promise { + const params = new URLSearchParams(); + if (muscleGroup) params.set('muscle_group', muscleGroup); + if (q) params.set('q', q); + const qs = params.toString(); + return request(`/api/v1/exercises${qs ? '?' + qs : ''}`); + }, + + create(data: CreateExerciseRequest): Promise { + return request('/api/v1/exercises', { + method: 'POST', + body: JSON.stringify(data), + }); + }, + + update(id: number, data: CreateExerciseRequest): Promise { + return request(`/api/v1/exercises/${id}`, { + method: 'PUT', + body: JSON.stringify(data), + }); + }, + + delete(id: number): Promise { + return request(`/api/v1/exercises/${id}`, { + method: 'DELETE', + }); + }, + + lastLog(id: number): Promise { + return request(`/api/v1/exercises/${id}/last-log`); + }, + + history(id: number, limit?: number): Promise { + const params = new URLSearchParams(); + if (limit) params.set('limit', String(limit)); + const qs = params.toString(); + return request( + `/api/v1/exercises/${id}/history${qs ? '?' + qs : ''}`, + ); + }, + }, + + sets: { + list(): Promise { + return request('/api/v1/sets'); + }, + + create(data: CreateSetRequest): Promise { + return request('/api/v1/sets', { + method: 'POST', + body: JSON.stringify(data), + }); + }, + + update(id: number, data: UpdateSetRequest): Promise { + return request(`/api/v1/sets/${id}`, { + method: 'PUT', + body: JSON.stringify(data), + }); + }, + + delete(id: number): Promise { + return request(`/api/v1/sets/${id}`, { + method: 'DELETE', + }); + }, + }, + + sessions: { + create(data: CreateSessionRequest): Promise { + return request('/api/v1/sessions', { + method: 'POST', + body: JSON.stringify(data), + }); + }, + + list(limit?: number, offset?: number): Promise { + const params = new URLSearchParams(); + if (limit) params.set('limit', String(limit)); + if (offset) params.set('offset', String(offset)); + const qs = params.toString(); + return request(`/api/v1/sessions${qs ? '?' 
+ qs : ''}`); + }, + + get(id: number): Promise { + return request(`/api/v1/sessions/${id}`); + }, + + end(id: number, note?: string): Promise { + return request(`/api/v1/sessions/${id}/end`, { + method: 'PUT', + body: JSON.stringify({ note: note || '' }), + }); + }, + + createLog(sessionId: number, data: CreateLogRequest): Promise { + return request(`/api/v1/sessions/${sessionId}/logs`, { + method: 'POST', + body: JSON.stringify(data), + }); + }, + + updateLog( + sessionId: number, + logId: number, + data: UpdateLogRequest, + ): Promise { + return request( + `/api/v1/sessions/${sessionId}/logs/${logId}`, + { + method: 'PUT', + body: JSON.stringify(data), + }, + ); + }, + + deleteLog(sessionId: number, logId: number): Promise { + return request(`/api/v1/sessions/${sessionId}/logs/${logId}`, { + method: 'DELETE', + }); + }, + }, + + stats: { + overview(): Promise { + return request('/api/v1/stats/overview'); + }, + }, +}; diff --git a/frontend/src/components/exercises/ExerciseCard.tsx b/frontend/src/components/exercises/ExerciseCard.tsx new file mode 100755 index 0000000..ad960ac --- /dev/null +++ b/frontend/src/components/exercises/ExerciseCard.tsx @@ -0,0 +1,54 @@ +import type { Exercise } from '../../types'; +import { MUSCLE_GROUP_LABELS, MUSCLE_GROUP_COLORS } from '../../types'; + +interface ExerciseCardProps { + exercise: Exercise; + onEdit: (exercise: Exercise) => void; + onDelete: (exercise: Exercise) => void; +} + +export function ExerciseCard({ exercise, onEdit, onDelete }: ExerciseCardProps) { + const label = MUSCLE_GROUP_LABELS[exercise.muscle_group] || exercise.muscle_group; + const color = MUSCLE_GROUP_COLORS[exercise.muscle_group] || 'bg-gray-600'; + + return ( +
+
+
+

{exercise.name}

+ {exercise.description && ( +

{exercise.description}

+ )} +
+ + {label} + + + Schritt: {exercise.weight_step_kg} kg + +
+
+
+ + +
+
+
+ ); +} diff --git a/frontend/src/components/exercises/ExerciseForm.tsx b/frontend/src/components/exercises/ExerciseForm.tsx new file mode 100755 index 0000000..24a57f3 --- /dev/null +++ b/frontend/src/components/exercises/ExerciseForm.tsx @@ -0,0 +1,116 @@ +import { useState, useEffect } from 'react'; +import type { Exercise, CreateExerciseRequest, MuscleGroup } from '../../types'; +import { MUSCLE_GROUPS } from '../../types'; + +interface ExerciseFormProps { + exercise?: Exercise | null; + onSubmit: (data: CreateExerciseRequest) => void; + onCancel: () => void; +} + +export function ExerciseForm({ exercise, onSubmit, onCancel }: ExerciseFormProps) { + const [name, setName] = useState(''); + const [description, setDescription] = useState(''); + const [muscleGroup, setMuscleGroup] = useState('brust'); + const [weightStep, setWeightStep] = useState(2.5); + + useEffect(() => { + if (exercise) { + setName(exercise.name); + setDescription(exercise.description); + setMuscleGroup(exercise.muscle_group); + setWeightStep(exercise.weight_step_kg); + } else { + setName(''); + setDescription(''); + setMuscleGroup('brust'); + setWeightStep(2.5); + } + }, [exercise]); + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + onSubmit({ + name: name.trim(), + description: description.trim(), + muscle_group: muscleGroup, + weight_step_kg: weightStep, + }); + }; + + const isValid = name.trim().length > 0; + + return ( +
+

+ {exercise ? 'Übung bearbeiten' : 'Neue Übung'} +

+ +
+ + setName(e.target.value)} + className="w-full bg-gray-800 border border-gray-700 rounded-lg px-3 py-2 text-gray-100 focus:outline-none focus:border-blue-500 min-h-[44px]" + placeholder="Übungsname" + maxLength={100} + /> +
+ +
+ +