Compare commits
36 Commits
de6e387c7b
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
17186e7b64 | ||
|
|
8eef933573 | ||
|
|
b73d91ccaa | ||
|
|
fa76787d7b | ||
|
|
1ce2f5aa47 | ||
|
|
735419662c | ||
|
|
213e015790 | ||
|
|
73665e1bc7 | ||
|
|
c8cd971f49 | ||
|
|
4d7ff23457 | ||
|
|
86627f94b1 | ||
|
|
034d16e059 | ||
|
|
d1649ddfce | ||
|
|
45875502f8 | ||
|
|
156917ece1 | ||
|
|
ad65102fdc | ||
|
|
e2419411fa | ||
|
|
286b48247e | ||
|
|
36766592d3 | ||
|
|
e240dfe39c | ||
|
|
cd1b8e23d2 | ||
|
|
691b49da40 | ||
|
|
7f4a5a63c1 | ||
|
|
58729ad511 | ||
|
|
4b0be2b5f9 | ||
|
|
d0b0b4f8bd | ||
|
|
0bb7758a2f | ||
|
|
a00610b831 | ||
|
|
db6726d6d7 | ||
|
|
0e4b01bcf2 | ||
|
|
dc671117f5 | ||
|
|
579597776c | ||
|
|
1fb015b0fc | ||
|
|
2a74ea3702 | ||
|
|
257d1e4062 | ||
|
|
b865e5a283 |
@@ -1,4 +0,0 @@
|
||||
# Copy to .env and set a strong password before starting
|
||||
DB_USER=pamietnik
|
||||
DB_NAME=pamietnik
|
||||
DB_PASSWORD=change-me-before-production
|
||||
39
.gitea/workflows/deploy.yml
Normal file
39
.gitea/workflows/deploy.yml
Normal file
@@ -0,0 +1,39 @@
|
||||
name: Deploy to NAS
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: self-hosted
|
||||
container:
|
||||
image: docker:latest
|
||||
options: -v /volume2/docker:/volume2/docker
|
||||
|
||||
steps:
|
||||
- name: Pull code
|
||||
run: |
|
||||
if [ -d "${{ vars.DEPLOY_DIR }}/.git" ]; then
|
||||
git -C ${{ vars.DEPLOY_DIR }} fetch origin main
|
||||
git -C ${{ vars.DEPLOY_DIR }} reset --hard origin/main
|
||||
git -C ${{ vars.DEPLOY_DIR }} clean -fd
|
||||
else
|
||||
git clone http://192.168.1.4:3000/christoph/pamietnik.git ${{ vars.DEPLOY_DIR }}
|
||||
fi
|
||||
|
||||
- name: Write .env
|
||||
run: |
|
||||
printf 'DB_PASSWORD=%s\nADMIN_USER=%s\nADMIN_PASSWORD=%s\n' \
|
||||
'${{ secrets.DB_PASSWORD }}' \
|
||||
'${{ vars.ADMIN_USER }}' \
|
||||
'${{ secrets.ADMIN_PASSWORD }}' \
|
||||
> ${{ vars.DEPLOY_DIR }}/.env
|
||||
|
||||
- name: Build & Deploy
|
||||
run: docker compose -f ${{ vars.DEPLOY_DIR }}/docker-compose.yml up --build -d
|
||||
|
||||
- name: Health check
|
||||
run: |
|
||||
sleep 15
|
||||
wget -qO- http://192.168.1.4:9050/healthz || exit 1
|
||||
29
CLAUDE.md
29
CLAUDE.md
@@ -62,8 +62,23 @@ internal/
|
||||
ingest.go POST /v1/trackpoints, POST /v1/trackpoints:batch
|
||||
query.go GET /v1/days, /v1/trackpoints, /v1/stops, /v1/suggestions
|
||||
webui.go server-side rendered web UI (login, /days, /days/{date})
|
||||
journal.go journal entry endpoints
|
||||
journal.go POST /entries, GET/POST /entries/{id}, GET /entries/{id}/edit
|
||||
media.go POST /media — single-file upload, returns markdown reference
|
||||
response.go shared response helpers
|
||||
api/static/
|
||||
style.css global styles (Pico CSS overrides)
|
||||
day.js GPS button, time auto-fill
|
||||
editor.js textarea drag-drop/paste upload → markdown ref insert
|
||||
autoplay.js IntersectionObserver: videos autoplay when visible
|
||||
api/templates/
|
||||
base.html layout + global nav (LoggedIn, IsAdmin injected by render())
|
||||
days.html day list + date picker
|
||||
day.html day detail: new entry form, entries, stops, trackpoints
|
||||
edit_entry.html edit existing entry
|
||||
public.html public feed (infinite scroll)
|
||||
login.html login form
|
||||
register.html self-registration
|
||||
admin/ admin layout + entries/users pages
|
||||
```
|
||||
|
||||
Key invariants:
|
||||
@@ -151,9 +166,15 @@ Webapp erreichbar unter `http://localhost:9050`.
|
||||
|
||||
**Erster Benutzer anlegen** (nach dem ersten Start):
|
||||
```bash
|
||||
docker-compose exec api /createuser
|
||||
# oder lokal:
|
||||
cd backend && go run ./cmd/createuser
|
||||
# Lokal (docker-compose):
|
||||
docker-compose exec api /createuser <username> <password>
|
||||
|
||||
# Produktion (NAS, Container läuft via Gitea Actions):
|
||||
docker exec -it pamietnik-api-1 /createuser <username> <password>
|
||||
# Container-Name prüfen: docker ps | grep pamietnik
|
||||
|
||||
# Lokal ohne Docker:
|
||||
cd backend && go run ./cmd/createuser <username> <password>
|
||||
```
|
||||
|
||||
**Nur neu bauen ohne Cache:**
|
||||
|
||||
14
Dockerfile
14
Dockerfile
@@ -6,12 +6,24 @@ RUN npm ci
|
||||
COPY webapp/ ./
|
||||
RUN npm run build
|
||||
|
||||
# Stage 2: Build Go server
|
||||
# Stage 2: Compile static TypeScript
|
||||
FROM node:22-alpine AS static-ts-builder
|
||||
WORKDIR /build/backend
|
||||
COPY backend/static-ts/ ./static-ts/
|
||||
RUN mkdir -p ./internal/api/static \
|
||||
&& cd ./static-ts \
|
||||
&& npm ci \
|
||||
&& npm run build
|
||||
|
||||
# Stage 3: Build Go server
|
||||
FROM golang:1.25-alpine AS go-builder
|
||||
WORKDIR /app
|
||||
COPY backend/go.mod backend/go.sum ./
|
||||
RUN go mod download
|
||||
COPY backend/ ./
|
||||
# Inject compiled static JS
|
||||
COPY --from=static-ts-builder /build/backend/internal/api/static/ ./internal/api/static/
|
||||
RUN go test ./...
|
||||
# Inject built SPA into embed path
|
||||
COPY --from=webapp-builder /webapp/dist ./internal/api/webapp/
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build -o /server ./cmd/server
|
||||
|
||||
1
backend
1
backend
Submodule backend deleted from dbcb0d4a09
2
backend/.env.example
Normal file
2
backend/.env.example
Normal file
@@ -0,0 +1,2 @@
|
||||
DATABASE_URL=postgres://ralph:ralph@localhost:5432/ralph?sslmode=disable
|
||||
LISTEN_ADDR=:8080
|
||||
29
backend/.gitignore
vendored
Normal file
29
backend/.gitignore
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
# Binaries
|
||||
/server
|
||||
/migrate
|
||||
/createuser
|
||||
*.exe
|
||||
|
||||
# Build output
|
||||
dist/
|
||||
bin/
|
||||
|
||||
# Uploads
|
||||
uploads/
|
||||
|
||||
# Environment / Secrets
|
||||
.env
|
||||
*.env
|
||||
|
||||
# IDE
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
|
||||
# Go test cache / coverage
|
||||
*.test
|
||||
*.out
|
||||
coverage.html
|
||||
109
backend/CLAUDE.md
Normal file
109
backend/CLAUDE.md
Normal file
@@ -0,0 +1,109 @@
|
||||
# CLAUDE.md — Pamietnik Backend (Go Server)
|
||||
|
||||
## Stack
|
||||
|
||||
Language: Go
|
||||
DB: PostgreSQL
|
||||
API-Doc: OpenAPI 3.1 (openapi.yaml)
|
||||
Auth: Session Cookie (Web UI); API-Key oder JWT (Android Upload, TBD)
|
||||
Hashing: Argon2id (Passwörter)
|
||||
Geocoding: Nominatim (OSM) mit Cache + Rate-Limit; Provider austauschbar
|
||||
Maps: OpenStreetMap Tiles (konfigurierbar, serverseitig)
|
||||
Dev: docker-compose (API + PostgreSQL)
|
||||
|
||||
---
|
||||
|
||||
## Kern-Features (Backend)
|
||||
|
||||
1. REST API Ingest: Single + Batch Trackpoints (Idempotenz via event_id)
|
||||
2. Idempotenz/Dedupe: Unique Key (device_id, event_id); Duplikate = 200 OK
|
||||
3. Stop Detection: Aufenthalte erkennen (minDuration + radiusMeters konfigurierbar)
|
||||
4. Suggestions: Aus Stops Vorschläge ableiten + speichern
|
||||
5. Reverse-Geocoding: Nominatim gecached, Provider austauschbar via Config
|
||||
6. Web UI: Login (Session Cookie), Tagesübersicht, Tagesdetail, Karte
|
||||
7. Auth: Argon2id Passwort-Hashing, Session-Store in PostgreSQL
|
||||
|
||||
---
|
||||
|
||||
## API Endpoints
|
||||
|
||||
Ingest:
|
||||
POST /v1/trackpoints <- Single Trackpoint
|
||||
POST /v1/trackpoints:batch <- Batch Trackpoints
|
||||
GET /healthz
|
||||
GET /readyz
|
||||
|
||||
Query (Auth required):
|
||||
GET /v1/days?from=YYYY-MM-DD&to=YYYY-MM-DD
|
||||
GET /v1/trackpoints?date=YYYY-MM-DD
|
||||
GET /v1/stops?date=YYYY-MM-DD
|
||||
GET /v1/suggestions?date=YYYY-MM-DD
|
||||
|
||||
Web UI (Session Cookie, serverseitig gerendert):
|
||||
GET /login
|
||||
POST /login
|
||||
POST /logout
|
||||
GET /days
|
||||
GET /days/{yyyy-mm-dd}
|
||||
|
||||
---
|
||||
|
||||
## Datenmodell (Kern)
|
||||
|
||||
Trackpoint:
|
||||
event_id string (UUID, client-generated)
|
||||
device_id string
|
||||
trip_id string
|
||||
timestamp RFC3339 oder epochMillis (TBD)
|
||||
lat, lon float64
|
||||
source "gps" | "manual"
|
||||
note string (optional)
|
||||
|
||||
Stop:
|
||||
stop_id string
|
||||
device_id, trip_id
|
||||
start_ts, end_ts
|
||||
center_lat, center_lon
|
||||
duration_s int
|
||||
place_label string (optional, Nominatim)
|
||||
|
||||
Suggestion:
|
||||
suggestion_id
|
||||
stop_id
|
||||
type "highlight" | "name_place" | "add_note"
|
||||
title/text string
|
||||
created_at, dismissed_at
|
||||
|
||||
---
|
||||
|
||||
## Architektur-Prinzipien
|
||||
|
||||
- Idempotenz zuerst: Kein Duplicate Insert, immer event_id prüfen
|
||||
- Geocoding nur ereignisbasiert (pro Stop), niemals periodisch/bulk
|
||||
- Geocoding-Provider über Config austauschbar (kein Hardcode)
|
||||
- Sessions serverseitig in PostgreSQL (invalidierbar bei Logout)
|
||||
- Stop Detection Parameter (minDuration, radiusMeters) konfigurierbar
|
||||
- OpenAPI immer aktuell halten; Änderungen nur via PR + CI Validation
|
||||
|
||||
---
|
||||
|
||||
## Offene Entscheidungen (TBD)
|
||||
|
||||
- timestamp Format: epochMillis vs RFC3339
|
||||
- Android Upload Auth: X-API-Key vs JWT
|
||||
- Payload: JSON vs Protobuf
|
||||
- Batch limits (max items, max bytes)
|
||||
- Retention Policy (Trackpoints löschen nach X Tagen)
|
||||
- Stop-Detection Parameter (Mindestdauer, Radius)
|
||||
- Geocoding Provider: Nominatim public vs self-hosted vs Alternative
|
||||
|
||||
---
|
||||
|
||||
## Nächste Tasks (Reihenfolge)
|
||||
|
||||
- [ ] T024 REST API finalisieren (Endpoints, Fehlerformat, Limits)
|
||||
- [ ] T027 PostgreSQL Schema + Migrationen + Indizes
|
||||
- [ ] T028 Idempotenz implementieren (unique event_id pro device)
|
||||
- [ ] T029 Observability (Logs/Metrics), Health/Ready
|
||||
- [ ] T030 docker-compose lokal (API + PostgreSQL) + Minimal-Client
|
||||
- [ ] T050 Auth-Konzept festlegen
|
||||
12
backend/Dockerfile
Normal file
12
backend/Dockerfile
Normal file
@@ -0,0 +1,12 @@
|
||||
FROM golang:1.25-alpine AS builder
|
||||
WORKDIR /app
|
||||
COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
COPY . .
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build -o /server ./cmd/server
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build -o /createuser ./cmd/createuser
|
||||
|
||||
FROM gcr.io/distroless/static-debian12
|
||||
COPY --from=builder /server /server
|
||||
COPY --from=builder /createuser /createuser
|
||||
ENTRYPOINT ["/server"]
|
||||
55
backend/cmd/createuser/main.go
Normal file
55
backend/cmd/createuser/main.go
Normal file
@@ -0,0 +1,55 @@
|
||||
// cmd/createuser creates a new user in the database.
|
||||
// Usage: DATABASE_URL=... go run ./cmd/createuser <username> <password>
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/auth"
|
||||
)
|
||||
|
||||
func main() {
|
||||
if len(os.Args) != 3 {
|
||||
fmt.Fprintln(os.Stderr, "usage: createuser <username> <password>")
|
||||
os.Exit(1)
|
||||
}
|
||||
username := os.Args[1]
|
||||
password := os.Args[2]
|
||||
|
||||
if len(password) < 8 {
|
||||
fmt.Fprintln(os.Stderr, "password must be at least 8 characters")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
dsn := os.Getenv("DATABASE_URL")
|
||||
if dsn == "" {
|
||||
dsn = "postgres://pamietnik:pamietnik@localhost:5432/pamietnik?sslmode=disable"
|
||||
}
|
||||
|
||||
conn, err := pgx.Connect(context.Background(), dsn)
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, "db error:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer conn.Close(context.Background())
|
||||
|
||||
hash, err := auth.HashPassword(password)
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, "hash error:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
_, err = conn.Exec(context.Background(),
|
||||
`INSERT INTO users (username, password_hash) VALUES ($1, $2)`,
|
||||
username, hash,
|
||||
)
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, "insert error:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
fmt.Printf("user '%s' created\n", username)
|
||||
}
|
||||
109
backend/cmd/server/main.go
Normal file
109
backend/cmd/server/main.go
Normal file
@@ -0,0 +1,109 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/api"
|
||||
"github.com/jacek/pamietnik/backend/internal/auth"
|
||||
"github.com/jacek/pamietnik/backend/internal/db"
|
||||
)
|
||||
|
||||
func main() {
|
||||
logger := slog.New(slog.NewJSONHandler(os.Stdout, nil))
|
||||
slog.SetDefault(logger)
|
||||
|
||||
dsn := getenv("DATABASE_URL", "postgres://pamietnik:pamietnik@localhost:5432/pamietnik?sslmode=disable")
|
||||
addr := getenv("LISTEN_ADDR", ":8080")
|
||||
uploadDir := getenv("UPLOAD_DIR", "./uploads")
|
||||
|
||||
ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
|
||||
defer cancel()
|
||||
|
||||
pool, err := db.NewPool(ctx, dsn)
|
||||
if err != nil {
|
||||
slog.Error("connect db", "err", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer pool.Close()
|
||||
slog.Info("database connected")
|
||||
|
||||
if err := db.InitSchema(ctx, pool); err != nil {
|
||||
slog.Error("init schema", "err", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
slog.Info("schema ready")
|
||||
|
||||
if adminUser := os.Getenv("ADMIN_USER"); adminUser != "" {
|
||||
adminPass := os.Getenv("ADMIN_PASSWORD")
|
||||
if adminPass == "" {
|
||||
slog.Error("ADMIN_USER set but ADMIN_PASSWORD is empty")
|
||||
os.Exit(1)
|
||||
}
|
||||
hash, err := auth.HashPassword(adminPass)
|
||||
if err != nil {
|
||||
slog.Error("hash admin password", "err", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
created, err := db.SeedAdminUser(ctx, pool, adminUser, hash)
|
||||
if err != nil {
|
||||
slog.Error("seed admin user", "err", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
if created {
|
||||
slog.Info("admin user created", "username", adminUser)
|
||||
}
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(uploadDir, 0o755); err != nil {
|
||||
slog.Error("create upload dir", "err", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
authStore := auth.NewStore(pool)
|
||||
tpStore := db.NewTrackpointStore(pool)
|
||||
stopStore := db.NewStopStore(pool)
|
||||
suggStore := db.NewSuggestionStore(pool)
|
||||
journalStore := db.NewJournalStore(pool)
|
||||
userStore := db.NewUserStore(pool)
|
||||
|
||||
router := api.NewRouter(authStore, tpStore, stopStore, suggStore, journalStore, userStore, uploadDir)
|
||||
|
||||
srv := &http.Server{
|
||||
Addr: addr,
|
||||
Handler: router,
|
||||
ReadTimeout: 15 * time.Second,
|
||||
WriteTimeout: 30 * time.Second,
|
||||
IdleTimeout: 60 * time.Second,
|
||||
}
|
||||
|
||||
go func() {
|
||||
slog.Info("server starting", "addr", addr)
|
||||
if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||
slog.Error("server error", "err", err)
|
||||
cancel()
|
||||
}
|
||||
}()
|
||||
|
||||
<-ctx.Done()
|
||||
slog.Info("shutting down")
|
||||
|
||||
shutCtx, shutCancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer shutCancel()
|
||||
if err := srv.Shutdown(shutCtx); err != nil {
|
||||
slog.Error("shutdown error", "err", err)
|
||||
}
|
||||
slog.Info("server stopped")
|
||||
}
|
||||
|
||||
func getenv(key, fallback string) string {
|
||||
if v := os.Getenv(key); v != "" {
|
||||
return v
|
||||
}
|
||||
return fallback
|
||||
}
|
||||
23
backend/go.mod
Normal file
23
backend/go.mod
Normal file
@@ -0,0 +1,23 @@
|
||||
module github.com/jacek/pamietnik/backend
|
||||
|
||||
go 1.25.7
|
||||
|
||||
require (
|
||||
github.com/go-chi/chi/v5 v5.2.5
|
||||
github.com/golang-migrate/migrate/v4 v4.19.1
|
||||
github.com/jackc/pgx/v5 v5.8.0
|
||||
golang.org/x/crypto v0.48.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/disintegration/imaging v1.6.2 // indirect
|
||||
github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||
github.com/yuin/goldmark v1.8.2 // indirect
|
||||
golang.org/x/image v0.39.0 // indirect
|
||||
golang.org/x/sync v0.20.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/text v0.36.0 // indirect
|
||||
)
|
||||
100
backend/go.sum
Normal file
100
backend/go.sum
Normal file
@@ -0,0 +1,100 @@
|
||||
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
|
||||
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
|
||||
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
|
||||
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dhui/dktest v0.4.6 h1:+DPKyScKSEp3VLtbMDHcUq6V5Lm5zfZZVb0Sk7Ahom4=
|
||||
github.com/dhui/dktest v0.4.6/go.mod h1:JHTSYDtKkvFNFHJKqCzVzqXecyv+tKt8EzceOmQOgbU=
|
||||
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
|
||||
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
|
||||
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI=
|
||||
github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
|
||||
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
|
||||
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug=
|
||||
github.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0=
|
||||
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang-migrate/migrate/v4 v4.19.1 h1:OCyb44lFuQfYXYLx1SCxPZQGU7mcaZ7gH9yH4jSFbBA=
|
||||
github.com/golang-migrate/migrate/v4 v4.19.1/go.mod h1:CTcgfjxhaUtsLipnLoQRWCrjYXycRz/g5+RWDuYgPrE=
|
||||
github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa h1:s+4MhCQ6YrzisK6hFJUX53drDT4UsSW3DEhKn0ifuHw=
|
||||
github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds=
|
||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||
github.com/jackc/pgx/v5 v5.8.0 h1:TYPDoleBBme0xGSAX3/+NujXXtpZn9HBONkQC7IEZSo=
|
||||
github.com/jackc/pgx/v5 v5.8.0/go.mod h1:QVeDInX2m9VyzvNeiCJVjCkNFqzsNb43204HshNSZKw=
|
||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
|
||||
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
||||
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
|
||||
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/yuin/goldmark v1.8.2 h1:kEGpgqJXdgbkhcOgBxkC0X0PmoPG1ZyoZ117rDVp4zE=
|
||||
github.com/yuin/goldmark v1.8.2/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
|
||||
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
|
||||
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
|
||||
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
|
||||
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
|
||||
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
|
||||
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
|
||||
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
|
||||
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
|
||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U=
|
||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.39.0 h1:skVYidAEVKgn8lZ602XO75asgXBgLj9G/FE3RbuPFww=
|
||||
golang.org/x/image v0.39.0/go.mod h1:sIbmppfU+xFLPIG0FoVUTvyBMmgng1/XAMhQ2ft0hpA=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
|
||||
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg=
|
||||
golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
137
backend/internal/api/ingest.go
Normal file
137
backend/internal/api/ingest.go
Normal file
@@ -0,0 +1,137 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/db"
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// Compile-time check: *db.TrackpointStore must satisfy db.TrackpointStorer.
|
||||
var _ db.TrackpointStorer = (*db.TrackpointStore)(nil)
|
||||
|
||||
type trackpointInput struct {
|
||||
EventID string `json:"event_id"`
|
||||
DeviceID string `json:"device_id"`
|
||||
TripID string `json:"trip_id"`
|
||||
Timestamp string `json:"timestamp"` // RFC3339
|
||||
Lat float64 `json:"lat"`
|
||||
Lon float64 `json:"lon"`
|
||||
Source string `json:"source"`
|
||||
Note string `json:"note,omitempty"`
|
||||
AccuracyM *float64 `json:"accuracy_m,omitempty"`
|
||||
SpeedMps *float64 `json:"speed_mps,omitempty"`
|
||||
BearingDeg *float64 `json:"bearing_deg,omitempty"`
|
||||
AltitudeM *float64 `json:"altitude_m,omitempty"`
|
||||
}
|
||||
|
||||
func (t trackpointInput) toDomain() (domain.Trackpoint, error) {
|
||||
ts, err := time.Parse(time.RFC3339, t.Timestamp)
|
||||
if err != nil {
|
||||
return domain.Trackpoint{}, err
|
||||
}
|
||||
src := t.Source
|
||||
if src == "" {
|
||||
src = "gps"
|
||||
}
|
||||
return domain.Trackpoint{
|
||||
EventID: t.EventID,
|
||||
DeviceID: t.DeviceID,
|
||||
TripID: t.TripID,
|
||||
Timestamp: ts,
|
||||
Lat: t.Lat,
|
||||
Lon: t.Lon,
|
||||
Source: src,
|
||||
Note: t.Note,
|
||||
AccuracyM: t.AccuracyM,
|
||||
SpeedMps: t.SpeedMps,
|
||||
BearingDeg: t.BearingDeg,
|
||||
AltitudeM: t.AltitudeM,
|
||||
}, nil
|
||||
}
|
||||
|
||||
type batchResponse struct {
|
||||
ServerTime string `json:"server_time"`
|
||||
AcceptedIDs []string `json:"accepted_ids"`
|
||||
Rejected []db.RejectedItem `json:"rejected"`
|
||||
}
|
||||
|
||||
// HandleSingleTrackpoint handles POST /v1/trackpoints
|
||||
func HandleSingleTrackpoint(store db.TrackpointStorer) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
var input trackpointInput
|
||||
if err := json.NewDecoder(r.Body).Decode(&input); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "invalid JSON")
|
||||
return
|
||||
}
|
||||
|
||||
point, err := input.toDomain()
|
||||
if err != nil {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "invalid timestamp: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
userID := userIDFromContext(r.Context())
|
||||
accepted, rejected, err := store.UpsertBatch(r.Context(), userID, []domain.Trackpoint{point})
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "INTERNAL_ERROR", "database error")
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, batchResponse{
|
||||
ServerTime: time.Now().UTC().Format(time.RFC3339),
|
||||
AcceptedIDs: accepted,
|
||||
Rejected: rejected,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// HandleBatchTrackpoints handles POST /v1/trackpoints:batch
|
||||
func HandleBatchTrackpoints(store db.TrackpointStorer) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
var inputs []trackpointInput
|
||||
if err := json.NewDecoder(r.Body).Decode(&inputs); err != nil {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "invalid JSON")
|
||||
return
|
||||
}
|
||||
if len(inputs) == 0 {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "empty batch")
|
||||
return
|
||||
}
|
||||
if len(inputs) > 500 {
|
||||
writeError(w, http.StatusBadRequest, "TOO_LARGE", "batch exceeds 500 items")
|
||||
return
|
||||
}
|
||||
|
||||
points := make([]domain.Trackpoint, 0, len(inputs))
|
||||
var parseRejected []db.RejectedItem
|
||||
for _, inp := range inputs {
|
||||
p, err := inp.toDomain()
|
||||
if err != nil {
|
||||
parseRejected = append(parseRejected, db.RejectedItem{
|
||||
EventID: inp.EventID,
|
||||
Code: "INVALID_TIMESTAMP",
|
||||
Message: err.Error(),
|
||||
})
|
||||
continue
|
||||
}
|
||||
points = append(points, p)
|
||||
}
|
||||
|
||||
userID := userIDFromContext(r.Context())
|
||||
accepted, rejected, err := store.UpsertBatch(r.Context(), userID, points)
|
||||
if err != nil {
|
||||
writeError(w, http.StatusInternalServerError, "INTERNAL_ERROR", "database error")
|
||||
return
|
||||
}
|
||||
rejected = append(rejected, parseRejected...)
|
||||
|
||||
writeJSON(w, http.StatusOK, batchResponse{
|
||||
ServerTime: time.Now().UTC().Format(time.RFC3339),
|
||||
AcceptedIDs: accepted,
|
||||
Rejected: rejected,
|
||||
})
|
||||
}
|
||||
}
|
||||
380
backend/internal/api/ingest_test.go
Normal file
380
backend/internal/api/ingest_test.go
Normal file
@@ -0,0 +1,380 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/db"
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// fakeTrackpointStore is an in-memory implementation of db.TrackpointStorer for tests.
|
||||
type fakeTrackpointStore struct {
|
||||
// stored maps event_id → Trackpoint (simulates unique constraint)
|
||||
stored map[string]domain.Trackpoint
|
||||
// forceErr causes UpsertBatch to return an error when set
|
||||
forceErr error
|
||||
}
|
||||
|
||||
func newFakeTrackpointStore() *fakeTrackpointStore {
|
||||
return &fakeTrackpointStore{stored: make(map[string]domain.Trackpoint)}
|
||||
}
|
||||
|
||||
func (f *fakeTrackpointStore) UpsertBatch(_ context.Context, _ string, points []domain.Trackpoint) ([]string, []db.RejectedItem, error) {
|
||||
if f.forceErr != nil {
|
||||
return nil, nil, f.forceErr
|
||||
}
|
||||
var accepted []string
|
||||
var rejected []db.RejectedItem
|
||||
for _, p := range points {
|
||||
// Validate
|
||||
if p.EventID == "" {
|
||||
rejected = append(rejected, db.RejectedItem{EventID: p.EventID, Code: "VALIDATION_ERROR", Message: "event_id is required"})
|
||||
continue
|
||||
}
|
||||
if p.DeviceID == "" {
|
||||
rejected = append(rejected, db.RejectedItem{EventID: p.EventID, Code: "VALIDATION_ERROR", Message: "device_id is required"})
|
||||
continue
|
||||
}
|
||||
if p.Lat < -90 || p.Lat > 90 {
|
||||
rejected = append(rejected, db.RejectedItem{EventID: p.EventID, Code: "VALIDATION_ERROR", Message: "lat out of range"})
|
||||
continue
|
||||
}
|
||||
if p.Lon < -180 || p.Lon > 180 {
|
||||
rejected = append(rejected, db.RejectedItem{EventID: p.EventID, Code: "VALIDATION_ERROR", Message: "lon out of range"})
|
||||
continue
|
||||
}
|
||||
// Idempotency: already stored → count as accepted (no duplicate insert)
|
||||
if _, exists := f.stored[p.EventID]; !exists {
|
||||
f.stored[p.EventID] = p
|
||||
}
|
||||
accepted = append(accepted, p.EventID)
|
||||
}
|
||||
return accepted, rejected, nil
|
||||
}
|
||||
|
||||
func (f *fakeTrackpointStore) ListByDate(_ context.Context, _, _ string) ([]domain.Trackpoint, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (f *fakeTrackpointStore) ListDays(_ context.Context, _, _, _ string) ([]domain.DaySummary, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// authContext injects a fake user_id into the request context, simulating a logged-in session.
|
||||
func authContext(r *http.Request) *http.Request {
|
||||
return r.WithContext(contextWithUserID(r.Context(), "user-test"))
|
||||
}
|
||||
|
||||
// --- HandleSingleTrackpoint tests ---
|
||||
|
||||
func TestHandleSingleTrackpoint_HappyPath(t *testing.T) {
|
||||
store := newFakeTrackpointStore()
|
||||
handler := HandleSingleTrackpoint(store)
|
||||
|
||||
body := `{
|
||||
"event_id": "evt-001",
|
||||
"device_id": "dev-001",
|
||||
"trip_id": "trip-1",
|
||||
"timestamp": "2024-06-01T12:00:00Z",
|
||||
"lat": 52.5,
|
||||
"lon": 13.4,
|
||||
"source": "gps"
|
||||
}`
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints", strings.NewReader(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d: %s", rec.Code, rec.Body.String())
|
||||
}
|
||||
var resp batchResponse
|
||||
if err := json.NewDecoder(rec.Body).Decode(&resp); err != nil {
|
||||
t.Fatalf("decode response: %v", err)
|
||||
}
|
||||
if len(resp.AcceptedIDs) != 1 || resp.AcceptedIDs[0] != "evt-001" {
|
||||
t.Errorf("expected accepted_ids=[evt-001], got %v", resp.AcceptedIDs)
|
||||
}
|
||||
if len(resp.Rejected) != 0 {
|
||||
t.Errorf("expected no rejected, got %v", resp.Rejected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleSingleTrackpoint_InvalidJSON(t *testing.T) {
|
||||
handler := HandleSingleTrackpoint(newFakeTrackpointStore())
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints", strings.NewReader("{bad json"))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleSingleTrackpoint_InvalidTimestamp(t *testing.T) {
|
||||
handler := HandleSingleTrackpoint(newFakeTrackpointStore())
|
||||
|
||||
body := `{"event_id":"e1","device_id":"d1","timestamp":"not-a-date","lat":10,"lon":10}`
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints", strings.NewReader(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleSingleTrackpoint_MissingEventID(t *testing.T) {
|
||||
store := newFakeTrackpointStore()
|
||||
handler := HandleSingleTrackpoint(store)
|
||||
|
||||
body := `{"device_id":"dev-1","timestamp":"2024-01-01T00:00:00Z","lat":10,"lon":10,"source":"gps"}`
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints", strings.NewReader(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200 (rejected in payload), got %d", rec.Code)
|
||||
}
|
||||
var resp batchResponse
|
||||
json.NewDecoder(rec.Body).Decode(&resp)
|
||||
if len(resp.Rejected) == 0 {
|
||||
t.Error("expected missing event_id to appear in rejected list")
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleSingleTrackpoint_InvalidLatLon(t *testing.T) {
|
||||
cases := []struct{ lat, lon float64 }{
|
||||
{91, 0},
|
||||
{-91, 0},
|
||||
{0, 181},
|
||||
{0, -181},
|
||||
}
|
||||
for _, c := range cases {
|
||||
body := fmt.Sprintf(`{"event_id":"e1","device_id":"d1","timestamp":"2024-01-01T00:00:00Z","lat":%v,"lon":%v,"source":"gps"}`, c.lat, c.lon)
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints", strings.NewReader(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
HandleSingleTrackpoint(newFakeTrackpointStore()).ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200 with rejected payload for lat=%v lon=%v, got %d", c.lat, c.lon, rec.Code)
|
||||
}
|
||||
var resp batchResponse
|
||||
json.NewDecoder(rec.Body).Decode(&resp)
|
||||
if len(resp.Rejected) == 0 {
|
||||
t.Errorf("expected invalid lat/lon to appear in rejected list (lat=%v lon=%v)", c.lat, c.lon)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleSingleTrackpoint_IdempotencyDuplicateEventID(t *testing.T) {
|
||||
store := newFakeTrackpointStore()
|
||||
handler := HandleSingleTrackpoint(store)
|
||||
|
||||
body := `{"event_id":"evt-dup","device_id":"dev-1","timestamp":"2024-01-01T00:00:00Z","lat":10,"lon":10,"source":"gps"}`
|
||||
|
||||
sendRequest := func() batchResponse {
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints", strings.NewReader(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
handler.ServeHTTP(rec, req)
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
var resp batchResponse
|
||||
json.NewDecoder(rec.Body).Decode(&resp)
|
||||
return resp
|
||||
}
|
||||
|
||||
r1 := sendRequest()
|
||||
r2 := sendRequest()
|
||||
|
||||
// Both calls must succeed and return the same accepted_ids
|
||||
if len(r1.AcceptedIDs) != 1 || r1.AcceptedIDs[0] != "evt-dup" {
|
||||
t.Errorf("first call: expected [evt-dup], got %v", r1.AcceptedIDs)
|
||||
}
|
||||
if len(r2.AcceptedIDs) != 1 || r2.AcceptedIDs[0] != "evt-dup" {
|
||||
t.Errorf("second call: expected [evt-dup] (idempotent), got %v", r2.AcceptedIDs)
|
||||
}
|
||||
// Store must not contain duplicate entries
|
||||
if len(store.stored) != 1 {
|
||||
t.Errorf("expected exactly 1 stored trackpoint, got %d", len(store.stored))
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleSingleTrackpoint_StoreError(t *testing.T) {
|
||||
store := newFakeTrackpointStore()
|
||||
store.forceErr = fmt.Errorf("connection reset")
|
||||
handler := HandleSingleTrackpoint(store)
|
||||
|
||||
body := `{"event_id":"e1","device_id":"d1","timestamp":"2024-01-01T00:00:00Z","lat":10,"lon":10}`
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints", strings.NewReader(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusInternalServerError {
|
||||
t.Errorf("expected 500 on store error, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
// --- HandleBatchTrackpoints tests ---
|
||||
|
||||
func validBatchBody(n int) string {
|
||||
items := make([]string, n)
|
||||
for i := range items {
|
||||
items[i] = fmt.Sprintf(`{"event_id":"evt-%d","device_id":"dev-1","timestamp":"2024-01-01T00:00:00Z","lat":10,"lon":10,"source":"gps"}`, i)
|
||||
}
|
||||
return "[" + strings.Join(items, ",") + "]"
|
||||
}
|
||||
|
||||
func TestHandleBatchTrackpoints_HappyPath(t *testing.T) {
|
||||
handler := HandleBatchTrackpoints(newFakeTrackpointStore())
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints:batch", strings.NewReader(validBatchBody(3)))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d: %s", rec.Code, rec.Body.String())
|
||||
}
|
||||
var resp batchResponse
|
||||
json.NewDecoder(rec.Body).Decode(&resp)
|
||||
if len(resp.AcceptedIDs) != 3 {
|
||||
t.Errorf("expected 3 accepted, got %d", len(resp.AcceptedIDs))
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleBatchTrackpoints_EmptyBatch(t *testing.T) {
|
||||
handler := HandleBatchTrackpoints(newFakeTrackpointStore())
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints:batch", strings.NewReader("[]"))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 for empty batch, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleBatchTrackpoints_ExceedsLimit(t *testing.T) {
|
||||
handler := HandleBatchTrackpoints(newFakeTrackpointStore())
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints:batch", strings.NewReader(validBatchBody(501)))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 for batch > 500, got %d", rec.Code)
|
||||
}
|
||||
var errResp errorResponse
|
||||
json.NewDecoder(rec.Body).Decode(&errResp)
|
||||
if errResp.Code != "TOO_LARGE" {
|
||||
t.Errorf("expected code TOO_LARGE, got %q", errResp.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleBatchTrackpoints_InvalidJSON(t *testing.T) {
|
||||
handler := HandleBatchTrackpoints(newFakeTrackpointStore())
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints:batch", strings.NewReader("{not array}"))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleBatchTrackpoints_PartialInvalidTimestamp(t *testing.T) {
|
||||
handler := HandleBatchTrackpoints(newFakeTrackpointStore())
|
||||
|
||||
// First item has valid timestamp, second has invalid
|
||||
body := `[
|
||||
{"event_id":"e1","device_id":"d1","timestamp":"2024-01-01T00:00:00Z","lat":10,"lon":10},
|
||||
{"event_id":"e2","device_id":"d1","timestamp":"not-a-date","lat":10,"lon":10}
|
||||
]`
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints:batch", bytes.NewBufferString(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
var resp batchResponse
|
||||
json.NewDecoder(rec.Body).Decode(&resp)
|
||||
if len(resp.AcceptedIDs) != 1 {
|
||||
t.Errorf("expected 1 accepted, got %d", len(resp.AcceptedIDs))
|
||||
}
|
||||
if len(resp.Rejected) != 1 || resp.Rejected[0].EventID != "e2" {
|
||||
t.Errorf("expected e2 in rejected, got %v", resp.Rejected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleBatchTrackpoints_IdempotencyBatchSentTwice(t *testing.T) {
|
||||
store := newFakeTrackpointStore()
|
||||
handler := HandleBatchTrackpoints(store)
|
||||
|
||||
body := validBatchBody(5)
|
||||
|
||||
sendBatch := func() batchResponse {
|
||||
req := httptest.NewRequest(http.MethodPost, "/v1/trackpoints:batch", strings.NewReader(body))
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
handler.ServeHTTP(rec, req)
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
var resp batchResponse
|
||||
json.NewDecoder(rec.Body).Decode(&resp)
|
||||
return resp
|
||||
}
|
||||
|
||||
r1 := sendBatch()
|
||||
r2 := sendBatch()
|
||||
|
||||
if len(r1.AcceptedIDs) != 5 {
|
||||
t.Errorf("first batch: expected 5 accepted, got %d", len(r1.AcceptedIDs))
|
||||
}
|
||||
if len(r2.AcceptedIDs) != 5 {
|
||||
t.Errorf("second batch (idempotent): expected 5 accepted, got %d", len(r2.AcceptedIDs))
|
||||
}
|
||||
// No duplicates stored
|
||||
if len(store.stored) != 5 {
|
||||
t.Errorf("expected 5 unique stored trackpoints, got %d", len(store.stored))
|
||||
}
|
||||
// accepted_ids must be identical for both calls
|
||||
for i := range r1.AcceptedIDs {
|
||||
if r1.AcceptedIDs[i] != r2.AcceptedIDs[i] {
|
||||
t.Errorf("accepted_ids differ at index %d: %q vs %q", i, r1.AcceptedIDs[i], r2.AcceptedIDs[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
253
backend/internal/api/journal.go
Normal file
253
backend/internal/api/journal.go
Normal file
@@ -0,0 +1,253 @@
|
||||
package api
|
||||
|
||||
import (
	"fmt"
	"log/slog"
	"mime/multipart"
	"net/http"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	"github.com/go-chi/chi/v5"
	"github.com/jacek/pamietnik/backend/internal/db"
	"github.com/jacek/pamietnik/backend/internal/domain"
)
|
||||
|
||||
const (
	maxUploadSize  = 32 << 20 // 32 MB per request
	maxSingleImage = 10 << 20 // 10 MB per image
)

// allowedMIME whitelists the upload content types accepted by the handlers
// (checked against the type sniffed from the first bytes of the file) and
// maps each to the file extension used when the upload is stored.
// NOTE(review): http.DetectContentType may not recognize every listed type
// (e.g. HEIC/AAC) — confirm saveUpload's sniffing covers them.
var allowedMIME = map[string]string{
	"image/jpeg": ".jpg",
	"image/png":  ".png",
	"image/webp": ".webp",
	"image/heic": ".heic",
	"image/gif":  ".gif",
	"video/mp4":  ".mp4",
	"video/webm": ".webm",
	"audio/mpeg": ".mp3",
	"audio/ogg":  ".ogg",
	"audio/wav":  ".wav",
	"audio/aac":  ".aac",
}
|
||||
|
||||
// JournalHandler serves the journal-entry HTTP endpoints (create, edit, update)
// and stores uploaded media for entries.
type JournalHandler struct {
	store     *db.JournalStore // persistence for entries and their images
	uploadDir string           // directory where uploaded media files are written
}

// NewJournalHandler wires a JournalHandler to its store and upload directory.
func NewJournalHandler(store *db.JournalStore, uploadDir string) *JournalHandler {
	return &JournalHandler{store: store, uploadDir: uploadDir}
}
|
||||
|
||||
// HandleGetEditEntry renders the edit form for an existing entry.
|
||||
func (h *JournalHandler) HandleGetEditEntry(w http.ResponseWriter, r *http.Request) {
|
||||
userID := userIDFromContext(r.Context())
|
||||
entryID := chi.URLParam(r, "id")
|
||||
entry, err := h.store.GetEntry(r.Context(), entryID, userID)
|
||||
if err != nil {
|
||||
http.Error(w, "Eintrag nicht gefunden", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
render(w, r, "edit_entry.html", map[string]any{"Entry": entry})
|
||||
}
|
||||
|
||||
// HandleUpdateEntry handles POST /entries/{id} (multipart/form-data).
|
||||
func (h *JournalHandler) HandleUpdateEntry(w http.ResponseWriter, r *http.Request) {
|
||||
if err := r.ParseMultipartForm(maxUploadSize); err != nil {
|
||||
http.Error(w, "Formular zu groß", http.StatusRequestEntityTooLarge)
|
||||
return
|
||||
}
|
||||
userID := userIDFromContext(r.Context())
|
||||
entryID := chi.URLParam(r, "id")
|
||||
|
||||
// Verify ownership first
|
||||
existing, err := h.store.GetEntry(r.Context(), entryID, userID)
|
||||
if err != nil {
|
||||
http.Error(w, "Eintrag nicht gefunden", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
entryTime := strings.TrimSpace(r.FormValue("time"))
|
||||
title := strings.TrimSpace(r.FormValue("title"))
|
||||
description := strings.TrimSpace(r.FormValue("description"))
|
||||
visibility := r.FormValue("visibility")
|
||||
if visibility != "public" && visibility != "private" {
|
||||
visibility = "private"
|
||||
}
|
||||
var hashtags []string
|
||||
if raw := strings.TrimSpace(r.FormValue("hashtags")); raw != "" {
|
||||
for _, tag := range strings.Split(raw, ",") {
|
||||
tag = strings.TrimSpace(strings.TrimPrefix(strings.TrimSpace(tag), "#"))
|
||||
if tag != "" {
|
||||
hashtags = append(hashtags, tag)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
entry := domain.JournalEntry{
|
||||
EntryID: entryID,
|
||||
UserID: userID,
|
||||
EntryDate: existing.EntryDate,
|
||||
EntryTime: entryTime,
|
||||
Title: title,
|
||||
Description: description,
|
||||
Visibility: visibility,
|
||||
Hashtags: hashtags,
|
||||
}
|
||||
if lat := r.FormValue("lat"); lat != "" {
|
||||
var v float64
|
||||
if _, err := fmt.Sscanf(lat, "%f", &v); err == nil {
|
||||
entry.Lat = &v
|
||||
}
|
||||
}
|
||||
if lon := r.FormValue("lon"); lon != "" {
|
||||
var v float64
|
||||
if _, err := fmt.Sscanf(lon, "%f", &v); err == nil {
|
||||
entry.Lon = &v
|
||||
}
|
||||
}
|
||||
|
||||
if err := h.store.UpdateEntry(r.Context(), entry); err != nil {
|
||||
http.Error(w, "Datenbankfehler", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if r.MultipartForm != nil {
|
||||
h.saveJournalImages(r, entryID, r.MultipartForm.File["images"])
|
||||
}
|
||||
|
||||
http.Redirect(w, r, "/days/"+existing.EntryDate, http.StatusSeeOther)
|
||||
}
|
||||
|
||||
// HandleCreateEntry handles POST /entries (multipart/form-data).
|
||||
func (h *JournalHandler) HandleCreateEntry(w http.ResponseWriter, r *http.Request) {
|
||||
if err := r.ParseMultipartForm(maxUploadSize); err != nil {
|
||||
http.Error(w, "Formular zu groß", http.StatusRequestEntityTooLarge)
|
||||
return
|
||||
}
|
||||
|
||||
userID := userIDFromContext(r.Context())
|
||||
date := strings.TrimSpace(r.FormValue("date"))
|
||||
entryTime := strings.TrimSpace(r.FormValue("time"))
|
||||
title := strings.TrimSpace(r.FormValue("title"))
|
||||
description := strings.TrimSpace(r.FormValue("description"))
|
||||
visibility := r.FormValue("visibility")
|
||||
if visibility != "public" && visibility != "private" {
|
||||
visibility = "private"
|
||||
}
|
||||
var hashtags []string
|
||||
if raw := strings.TrimSpace(r.FormValue("hashtags")); raw != "" {
|
||||
for _, tag := range strings.Split(raw, ",") {
|
||||
tag = strings.TrimSpace(tag)
|
||||
tag = strings.TrimPrefix(tag, "#")
|
||||
if tag != "" {
|
||||
hashtags = append(hashtags, tag)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if date == "" || entryTime == "" {
|
||||
http.Error(w, "Datum und Uhrzeit sind Pflichtfelder", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
entry := domain.JournalEntry{
|
||||
UserID: userID,
|
||||
EntryDate: date,
|
||||
EntryTime: entryTime,
|
||||
Title: title,
|
||||
Description: description,
|
||||
Visibility: visibility,
|
||||
Hashtags: hashtags,
|
||||
}
|
||||
|
||||
if lat := r.FormValue("lat"); lat != "" {
|
||||
var v float64
|
||||
if _, err := fmt.Sscanf(lat, "%f", &v); err == nil {
|
||||
entry.Lat = &v
|
||||
}
|
||||
}
|
||||
if lon := r.FormValue("lon"); lon != "" {
|
||||
var v float64
|
||||
if _, err := fmt.Sscanf(lon, "%f", &v); err == nil {
|
||||
entry.Lon = &v
|
||||
}
|
||||
}
|
||||
|
||||
saved, err := h.store.InsertEntry(r.Context(), entry)
|
||||
if err != nil {
|
||||
http.Error(w, "Datenbankfehler", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if r.MultipartForm != nil {
|
||||
h.saveJournalImages(r, saved.EntryID, r.MultipartForm.File["images"])
|
||||
}
|
||||
|
||||
http.Redirect(w, r, "/days/"+date, http.StatusSeeOther)
|
||||
}
|
||||
|
||||
// saveJournalImages saves uploaded files for a journal entry, with image resizing.
|
||||
// Errors per file are logged and silently skipped so the entry is always saved.
|
||||
func (h *JournalHandler) saveJournalImages(r *http.Request, entryID string, files []*multipart.FileHeader) {
|
||||
ctx := r.Context()
|
||||
for _, fh := range files {
|
||||
if fh.Size == 0 || fh.Size > maxSingleImage {
|
||||
continue
|
||||
}
|
||||
f, err := fh.Open()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
buf := make([]byte, 512)
|
||||
n, _ := f.Read(buf)
|
||||
mime := http.DetectContentType(buf[:n])
|
||||
if _, ok := allowedMIME[mime]; !ok {
|
||||
f.Close()
|
||||
continue
|
||||
}
|
||||
|
||||
baseName := sanitizeFilename(entryID + "_" + fh.Filename)
|
||||
filename, err := saveUpload(h.uploadDir, baseName, mime, buf[:n], f)
|
||||
f.Close()
|
||||
if err != nil {
|
||||
slog.Error("save upload", "entry_id", entryID, "err", err)
|
||||
continue
|
||||
}
|
||||
|
||||
img := domain.JournalImage{
|
||||
EntryID: entryID,
|
||||
Filename: filename,
|
||||
OriginalName: fh.Filename,
|
||||
MimeType: mime,
|
||||
SizeBytes: fh.Size,
|
||||
}
|
||||
if _, err := h.store.InsertImage(ctx, img); err != nil {
|
||||
slog.Error("insert image", "entry_id", entryID, "err", err)
|
||||
os.Remove(filepath.Join(h.uploadDir, filename))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// sanitizeFilename strips path separators and non-printable characters.
// The trailing extension is removed because the caller appends one derived
// from the detected MIME type.
func sanitizeFilename(name string) string {
	// Drop any directory component first.
	base := filepath.Base(name)

	// Replace characters that are unsafe on common filesystems.
	cleaned := strings.Map(func(r rune) rune {
		switch r {
		case '/', '\\', ':', '*', '?', '"', '<', '>', '|':
			return '_'
		default:
			return r
		}
	}, base)

	// strip extension — we append the detected one
	if dot := strings.LastIndex(cleaned, "."); dot > 0 {
		cleaned = cleaned[:dot]
	}
	return cleaned
}
|
||||
76
backend/internal/api/media.go
Normal file
76
backend/internal/api/media.go
Normal file
@@ -0,0 +1,76 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// MediaHandler serves ad-hoc media uploads referenced from markdown content.
type MediaHandler struct {
	uploadDir string // directory where uploaded files are written
}

// NewMediaHandler creates a MediaHandler that stores files in uploadDir.
func NewMediaHandler(uploadDir string) *MediaHandler {
	return &MediaHandler{uploadDir: uploadDir}
}
|
||||
|
||||
// HandleUpload handles POST /media — uploads a single file and returns its markdown reference.
|
||||
func (h *MediaHandler) HandleUpload(w http.ResponseWriter, r *http.Request) {
|
||||
if err := r.ParseMultipartForm(maxUploadSize); err != nil {
|
||||
http.Error(w, "too large", http.StatusRequestEntityTooLarge)
|
||||
return
|
||||
}
|
||||
fh, _, err := r.FormFile("file")
|
||||
if err != nil {
|
||||
http.Error(w, "missing file", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
defer fh.Close()
|
||||
|
||||
buf := make([]byte, 512)
|
||||
n, _ := fh.Read(buf)
|
||||
mime := http.DetectContentType(buf[:n])
|
||||
if _, ok := allowedMIME[mime]; !ok {
|
||||
http.Error(w, "unsupported type", http.StatusUnsupportedMediaType)
|
||||
return
|
||||
}
|
||||
|
||||
filename, err := saveUpload(h.uploadDir, randomID(), mime, buf[:n], fh)
|
||||
if err != nil {
|
||||
http.Error(w, "storage error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
ref := markdownRef(mime, filename)
|
||||
slog.Info("media uploaded", "filename", filename, "mime", mime)
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]string{
|
||||
"filename": filename,
|
||||
"mime": mime,
|
||||
"ref": ref,
|
||||
})
|
||||
}
|
||||
|
||||
// markdownRef builds the markdown snippet that embeds or links an uploaded file.
// BUG FIX: the image and video branches returned "", so uploading an image or
// video produced an empty reference in the response; they now emit markdown.
func markdownRef(mime, filename string) string {
	url := "/uploads/" + filename
	switch {
	case strings.HasPrefix(mime, "image/"):
		// Inline image embed.
		return "![" + filename + "](" + url + ")"
	case strings.HasPrefix(mime, "video/"):
		// Markdown has no native video embed; fall back to a plain link.
		return "[" + filename + "](" + url + ")"
	case strings.HasPrefix(mime, "audio/"):
		return "[" + filename + "](" + url + ")"
	default:
		return "[" + filename + "](" + url + ")"
	}
}
|
||||
|
||||
func randomID() string {
|
||||
b := make([]byte, 12)
|
||||
_, _ = rand.Read(b)
|
||||
return hex.EncodeToString(b)
|
||||
}
|
||||
76
backend/internal/api/middleware.go
Normal file
76
backend/internal/api/middleware.go
Normal file
@@ -0,0 +1,76 @@
|
||||
package api
|
||||
|
||||
import (
	"context"
	"net/http"
	"strings"

	"github.com/jacek/pamietnik/backend/internal/auth"
	"github.com/jacek/pamietnik/backend/internal/domain"
)
|
||||
|
||||
// contextKey is a private type for context values, preventing collisions
// with keys defined in other packages.
type contextKey string

// ctxUserID holds the authenticated user's ID (a string).
const ctxUserID contextKey = "user_id"

// ctxUser holds the authenticated user's full domain.User value.
const ctxUser contextKey = "user"

// sessionCookieName is the cookie carrying the opaque session token.
const sessionCookieName = "session"
|
||||
|
||||
// RequireAuth validates the session cookie and stores user info in context.
|
||||
// On failure it redirects to /login for browser requests (text/html) or returns JSON 401.
|
||||
func RequireAuth(authStore *auth.Store) func(http.Handler) http.Handler {
|
||||
return func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
user, err := userFromRequest(r, authStore)
|
||||
if err != nil {
|
||||
redirectOrUnauthorized(w, r)
|
||||
return
|
||||
}
|
||||
ctx := context.WithValue(r.Context(), ctxUserID, user.UserID)
|
||||
ctx = context.WithValue(ctx, ctxUser, user)
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// requireAdmin checks that the authenticated user is an admin.
|
||||
func requireAdmin(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
u, ok := r.Context().Value(ctxUser).(domain.User)
|
||||
if !ok || !u.IsAdmin {
|
||||
http.Redirect(w, r, "/days", http.StatusSeeOther)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
func userFromRequest(r *http.Request, authStore *auth.Store) (domain.User, error) {
|
||||
cookie, err := r.Cookie(sessionCookieName)
|
||||
if err != nil {
|
||||
return domain.User{}, auth.ErrSessionNotFound
|
||||
}
|
||||
return authStore.GetUserBySession(r.Context(), cookie.Value)
|
||||
}
|
||||
|
||||
func redirectOrUnauthorized(w http.ResponseWriter, r *http.Request) {
|
||||
accept := r.Header.Get("Accept")
|
||||
if len(accept) > 0 && (accept == "application/json" || r.Header.Get("X-Requested-With") == "XMLHttpRequest") {
|
||||
writeError(w, http.StatusUnauthorized, "UNAUTHORIZED", "login required")
|
||||
return
|
||||
}
|
||||
http.Redirect(w, r, "/login", http.StatusSeeOther)
|
||||
}
|
||||
|
||||
func userIDFromContext(ctx context.Context) string {
|
||||
v, _ := ctx.Value(ctxUserID).(string)
|
||||
return v
|
||||
}
|
||||
|
||||
func userFromContext(ctx context.Context) domain.User {
|
||||
v, _ := ctx.Value(ctxUser).(domain.User)
|
||||
return v
|
||||
}
|
||||
|
||||
func contextWithUserID(ctx context.Context, userID string) context.Context {
|
||||
return context.WithValue(ctx, ctxUserID, userID)
|
||||
}
|
||||
102
backend/internal/api/query.go
Normal file
102
backend/internal/api/query.go
Normal file
@@ -0,0 +1,102 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/db"
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// HandleListDays handles GET /v1/days?from=YYYY-MM-DD&to=YYYY-MM-DD
|
||||
func HandleListDays(store db.TrackpointStorer) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
userID := userIDFromContext(r.Context())
|
||||
from := r.URL.Query().Get("from")
|
||||
to := r.URL.Query().Get("to")
|
||||
if from == "" || to == "" {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "from and to are required (YYYY-MM-DD)")
|
||||
return
|
||||
}
|
||||
|
||||
days, err := store.ListDays(r.Context(), userID, from, to)
|
||||
if err != nil {
|
||||
slog.Error("list days", "user_id", userID, "err", err)
|
||||
writeError(w, http.StatusInternalServerError, "INTERNAL_ERROR", "database error")
|
||||
return
|
||||
}
|
||||
if days == nil {
|
||||
days = []domain.DaySummary{}
|
||||
}
|
||||
writeJSON(w, http.StatusOK, days)
|
||||
}
|
||||
}
|
||||
|
||||
// HandleListTrackpoints handles GET /v1/trackpoints?date=YYYY-MM-DD
|
||||
func HandleListTrackpoints(store db.TrackpointStorer) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
userID := userIDFromContext(r.Context())
|
||||
date := r.URL.Query().Get("date")
|
||||
if date == "" {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "date is required (YYYY-MM-DD)")
|
||||
return
|
||||
}
|
||||
|
||||
points, err := store.ListByDate(r.Context(), userID, date)
|
||||
if err != nil {
|
||||
slog.Error("list trackpoints", "user_id", userID, "date", date, "err", err)
|
||||
writeError(w, http.StatusInternalServerError, "INTERNAL_ERROR", "database error")
|
||||
return
|
||||
}
|
||||
if points == nil {
|
||||
points = []domain.Trackpoint{}
|
||||
}
|
||||
writeJSON(w, http.StatusOK, points)
|
||||
}
|
||||
}
|
||||
|
||||
// HandleListStops handles GET /v1/stops?date=YYYY-MM-DD
|
||||
func HandleListStops(store db.StopStorer) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
userID := userIDFromContext(r.Context())
|
||||
date := r.URL.Query().Get("date")
|
||||
if date == "" {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "date is required (YYYY-MM-DD)")
|
||||
return
|
||||
}
|
||||
|
||||
stops, err := store.ListByDate(r.Context(), userID, date)
|
||||
if err != nil {
|
||||
slog.Error("list stops", "user_id", userID, "date", date, "err", err)
|
||||
writeError(w, http.StatusInternalServerError, "INTERNAL_ERROR", "database error")
|
||||
return
|
||||
}
|
||||
if stops == nil {
|
||||
stops = []domain.Stop{}
|
||||
}
|
||||
writeJSON(w, http.StatusOK, stops)
|
||||
}
|
||||
}
|
||||
|
||||
// HandleListSuggestions handles GET /v1/suggestions?date=YYYY-MM-DD
|
||||
func HandleListSuggestions(store db.SuggestionStorer) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
userID := userIDFromContext(r.Context())
|
||||
date := r.URL.Query().Get("date")
|
||||
if date == "" {
|
||||
writeError(w, http.StatusBadRequest, "BAD_REQUEST", "date is required (YYYY-MM-DD)")
|
||||
return
|
||||
}
|
||||
|
||||
suggestions, err := store.ListByDate(r.Context(), userID, date)
|
||||
if err != nil {
|
||||
slog.Error("list suggestions", "user_id", userID, "date", date, "err", err)
|
||||
writeError(w, http.StatusInternalServerError, "INTERNAL_ERROR", "database error")
|
||||
return
|
||||
}
|
||||
if suggestions == nil {
|
||||
suggestions = []domain.Suggestion{}
|
||||
}
|
||||
writeJSON(w, http.StatusOK, suggestions)
|
||||
}
|
||||
}
|
||||
318
backend/internal/api/query_test.go
Normal file
318
backend/internal/api/query_test.go
Normal file
@@ -0,0 +1,318 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/db"
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// fakeQueryTrackpointStore implements db.TrackpointStorer for query handler tests.
type fakeQueryTrackpointStore struct {
	days   []domain.DaySummary // canned result for ListDays
	points []domain.Trackpoint // canned result for ListByDate
	err    error               // returned by the List methods when set
}

// UpsertBatch is unused by the query tests and always succeeds with no results.
func (f *fakeQueryTrackpointStore) UpsertBatch(_ context.Context, _ string, _ []domain.Trackpoint) ([]string, []db.RejectedItem, error) {
	return nil, nil, nil
}

func (f *fakeQueryTrackpointStore) ListByDate(_ context.Context, _, _ string) ([]domain.Trackpoint, error) {
	return f.points, f.err
}

func (f *fakeQueryTrackpointStore) ListDays(_ context.Context, _, _, _ string) ([]domain.DaySummary, error) {
	return f.days, f.err
}

// fakeStopStore implements db.StopStorer.
type fakeStopStore struct {
	stops []domain.Stop // canned result for ListByDate
	err   error         // returned alongside the canned result
}

func (f *fakeStopStore) ListByDate(_ context.Context, _, _ string) ([]domain.Stop, error) {
	return f.stops, f.err
}

// fakeSuggestionStore implements db.SuggestionStorer.
type fakeSuggestionStore struct {
	suggestions []domain.Suggestion // canned result for ListByDate
	err         error               // returned alongside the canned result
}

func (f *fakeSuggestionStore) ListByDate(_ context.Context, _, _ string) ([]domain.Suggestion, error) {
	return f.suggestions, f.err
}
|
||||
|
||||
// --- HandleListDays ---
|
||||
|
||||
func TestHandleListDays_MissingFromParam(t *testing.T) {
|
||||
handler := HandleListDays(&fakeQueryTrackpointStore{})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/days?to=2024-06-30", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 when 'from' missing, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListDays_MissingToParam(t *testing.T) {
|
||||
handler := HandleListDays(&fakeQueryTrackpointStore{})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/days?from=2024-06-01", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 when 'to' missing, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListDays_BothParamsMissing(t *testing.T) {
|
||||
handler := HandleListDays(&fakeQueryTrackpointStore{})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/days", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 when both params missing, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListDays_EmptyResultIsArray(t *testing.T) {
|
||||
handler := HandleListDays(&fakeQueryTrackpointStore{days: nil})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/days?from=2024-06-01&to=2024-06-30", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
// Must be a JSON array, not null
|
||||
body := strings.TrimSpace(rec.Body.String())
|
||||
if !strings.HasPrefix(body, "[") {
|
||||
t.Errorf("expected JSON array, got: %s", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListDays_ReturnsDays(t *testing.T) {
|
||||
ts := time.Now()
|
||||
store := &fakeQueryTrackpointStore{
|
||||
days: []domain.DaySummary{
|
||||
{Date: "2024-06-01", Count: 42, FirstTS: &ts, LastTS: &ts},
|
||||
},
|
||||
}
|
||||
handler := HandleListDays(store)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/days?from=2024-06-01&to=2024-06-30", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
var days []domain.DaySummary
|
||||
json.NewDecoder(rec.Body).Decode(&days)
|
||||
if len(days) != 1 || days[0].Date != "2024-06-01" || days[0].Count != 42 {
|
||||
t.Errorf("unexpected response: %+v", days)
|
||||
}
|
||||
}
|
||||
|
||||
// --- HandleListTrackpoints ---
|
||||
|
||||
func TestHandleListTrackpoints_MissingDateParam(t *testing.T) {
|
||||
handler := HandleListTrackpoints(&fakeQueryTrackpointStore{})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/trackpoints", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 when 'date' missing, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListTrackpoints_EmptyResultIsArray(t *testing.T) {
|
||||
handler := HandleListTrackpoints(&fakeQueryTrackpointStore{points: nil})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/trackpoints?date=2024-06-01", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
body := strings.TrimSpace(rec.Body.String())
|
||||
if !strings.HasPrefix(body, "[") {
|
||||
t.Errorf("expected JSON array, got: %s", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListTrackpoints_ReturnsPoints(t *testing.T) {
|
||||
store := &fakeQueryTrackpointStore{
|
||||
points: []domain.Trackpoint{
|
||||
{EventID: "e1", DeviceID: "d1", Lat: 52.5, Lon: 13.4, Source: "gps"},
|
||||
{EventID: "e2", DeviceID: "d1", Lat: 52.6, Lon: 13.5, Source: "gps"},
|
||||
},
|
||||
}
|
||||
handler := HandleListTrackpoints(store)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/trackpoints?date=2024-06-01", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
var pts []domain.Trackpoint
|
||||
json.NewDecoder(rec.Body).Decode(&pts)
|
||||
if len(pts) != 2 {
|
||||
t.Errorf("expected 2 trackpoints, got %d", len(pts))
|
||||
}
|
||||
}
|
||||
|
||||
// --- HandleListStops ---
|
||||
|
||||
func TestHandleListStops_MissingDateParam(t *testing.T) {
|
||||
handler := HandleListStops(&fakeStopStore{})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/stops", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 when 'date' missing, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListStops_EmptyResultIsArray(t *testing.T) {
|
||||
handler := HandleListStops(&fakeStopStore{stops: nil})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/stops?date=2024-06-01", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
body := strings.TrimSpace(rec.Body.String())
|
||||
if !strings.HasPrefix(body, "[") {
|
||||
t.Errorf("expected JSON array, got: %s", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListStops_ReturnsStops(t *testing.T) {
|
||||
now := time.Now()
|
||||
store := &fakeStopStore{
|
||||
stops: []domain.Stop{
|
||||
{StopID: "stop-1", DeviceID: "d1", StartTS: now, EndTS: now, CenterLat: 52.5, CenterLon: 13.4, DurationS: 600},
|
||||
},
|
||||
}
|
||||
handler := HandleListStops(store)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/stops?date=2024-06-01", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
var stops []domain.Stop
|
||||
json.NewDecoder(rec.Body).Decode(&stops)
|
||||
if len(stops) != 1 || stops[0].StopID != "stop-1" {
|
||||
t.Errorf("unexpected response: %+v", stops)
|
||||
}
|
||||
}
|
||||
|
||||
// --- HandleListSuggestions ---
|
||||
|
||||
func TestHandleListSuggestions_MissingDateParam(t *testing.T) {
|
||||
handler := HandleListSuggestions(&fakeSuggestionStore{})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/suggestions", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusBadRequest {
|
||||
t.Errorf("expected 400 when 'date' missing, got %d", rec.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListSuggestions_EmptyResultIsArray(t *testing.T) {
|
||||
handler := HandleListSuggestions(&fakeSuggestionStore{suggestions: nil})
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/suggestions?date=2024-06-01", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
body := strings.TrimSpace(rec.Body.String())
|
||||
if !strings.HasPrefix(body, "[") {
|
||||
t.Errorf("expected JSON array, got: %s", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleListSuggestions_ReturnsSuggestions(t *testing.T) {
|
||||
now := time.Now()
|
||||
store := &fakeSuggestionStore{
|
||||
suggestions: []domain.Suggestion{
|
||||
{SuggestionID: "sug-1", StopID: "stop-1", Type: "highlight", Title: "Nice spot", CreatedAt: now},
|
||||
},
|
||||
}
|
||||
handler := HandleListSuggestions(store)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/v1/suggestions?date=2024-06-01", nil)
|
||||
req = authContext(req)
|
||||
rec := httptest.NewRecorder()
|
||||
|
||||
handler.ServeHTTP(rec, req)
|
||||
|
||||
if rec.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200, got %d", rec.Code)
|
||||
}
|
||||
var suggestions []domain.Suggestion
|
||||
json.NewDecoder(rec.Body).Decode(&suggestions)
|
||||
if len(suggestions) != 1 || suggestions[0].SuggestionID != "sug-1" {
|
||||
t.Errorf("unexpected response: %+v", suggestions)
|
||||
}
|
||||
}
|
||||
21
backend/internal/api/response.go
Normal file
21
backend/internal/api/response.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func writeJSON(w http.ResponseWriter, status int, v any) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
json.NewEncoder(w).Encode(v)
|
||||
}
|
||||
|
||||
type errorResponse struct {
|
||||
Code string `json:"code"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
func writeError(w http.ResponseWriter, status int, code, message string) {
|
||||
writeJSON(w, status, errorResponse{Code: code, Message: message})
|
||||
}
|
||||
102
backend/internal/api/router.go
Normal file
102
backend/internal/api/router.go
Normal file
@@ -0,0 +1,102 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/auth"
|
||||
"github.com/jacek/pamietnik/backend/internal/db"
|
||||
)
|
||||
|
||||
// NewRouter assembles the application's complete HTTP handler: health
// endpoints, the session-authenticated JSON ingest/query API, the
// server-rendered web UI (public, authenticated, and admin routes),
// uploaded media, and the embedded Vite SPA mounted under /app.
//
// uploadDir is the filesystem directory used both for storing uploads
// (media/journal handlers) and for serving them under /uploads/.
func NewRouter(
	authStore *auth.Store,
	tpStore *db.TrackpointStore,
	stopStore *db.StopStore,
	suggStore *db.SuggestionStore,
	journalStore *db.JournalStore,
	userStore *db.UserStore,
	uploadDir string,
) http.Handler {
	r := chi.NewRouter()
	r.Use(middleware.RealIP)
	r.Use(middleware.Logger)
	r.Use(middleware.Recoverer)

	webUI := NewWebUI(authStore, tpStore, stopStore, journalStore, userStore)
	journalHandler := NewJournalHandler(journalStore, uploadDir)
	mediaHandler := NewMediaHandler(uploadDir)
	authMW := RequireAuth(authStore)

	// Health
	// NOTE(review): both probes answer "ok" unconditionally — /readyz does not
	// verify DB connectivity; confirm that is intentional.
	r.Get("/healthz", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})
	r.Get("/readyz", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})

	// Ingest (session auth; Android API-Key auth TBD)
	r.Group(func(r chi.Router) {
		r.Use(authMW)
		r.Post("/v1/trackpoints", HandleSingleTrackpoint(tpStore))
		r.Post("/v1/trackpoints:batch", HandleBatchTrackpoints(tpStore))
	})

	// Query API (session auth)
	r.Group(func(r chi.Router) {
		r.Use(authMW)
		r.Get("/v1/days", HandleListDays(tpStore))
		r.Get("/v1/trackpoints", HandleListTrackpoints(tpStore))
		r.Get("/v1/stops", HandleListStops(stopStore))
		r.Get("/v1/suggestions", HandleListSuggestions(suggStore))
	})

	// Static assets (CSS etc.)
	r.Handle("/static/*", http.StripPrefix("/static/", http.FileServer(http.FS(staticFS()))))

	// Public routes (no auth required)
	r.Get("/", webUI.HandleFeed)
	r.Get("/feed", webUI.HandleFeedFragment)
	r.Get("/register", webUI.HandleGetRegister)
	r.Post("/register", webUI.HandlePostRegister)
	r.Get("/login", webUI.HandleGetLogin)
	r.Post("/login", webUI.HandlePostLogin)
	r.Post("/logout", webUI.HandleLogout)

	// Authenticated web routes
	r.Group(func(r chi.Router) {
		r.Use(authMW)
		r.Get("/days", webUI.HandleDaysList)
		r.Get("/days/redirect", webUI.HandleDaysRedirect)
		r.Get("/days/{date}", webUI.HandleDayDetail)
		r.Post("/media", mediaHandler.HandleUpload)
		r.Post("/entries", journalHandler.HandleCreateEntry)
		r.Get("/entries/{id}/edit", journalHandler.HandleGetEditEntry)
		r.Post("/entries/{id}", journalHandler.HandleUpdateEntry)
	})

	// Admin routes (session auth + admin flag)
	r.Group(func(r chi.Router) {
		r.Use(authMW)
		r.Use(requireAdmin)
		r.Get("/admin", func(w http.ResponseWriter, r *http.Request) {
			http.Redirect(w, r, "/admin/entries", http.StatusSeeOther)
		})
		r.Get("/admin/entries", webUI.HandleAdminEntries)
		r.Get("/admin/users", webUI.HandleAdminUsers)
		r.Post("/admin/users", webUI.HandleAdminCreateUser)
		r.Delete("/admin/users/{id}", webUI.HandleAdminDeleteUser)
	})

	// Serve uploaded images
	// NOTE(review): /uploads/* is registered outside the auth groups, so
	// uploaded files are publicly reachable — confirm that is intended.
	r.Handle("/uploads/*", http.StripPrefix("/uploads/", http.FileServer(http.Dir(uploadDir))))

	// SPA (Vite webapp) — served under /app/*
	spaPrefix := "/app"
	r.Handle(spaPrefix, http.RedirectHandler(spaPrefix+"/", http.StatusMovedPermanently))
	r.Handle(spaPrefix+"/*", http.StripPrefix(spaPrefix, SPAHandler(spaPrefix)))

	return r
}
|
||||
53
backend/internal/api/spa.go
Normal file
53
backend/internal/api/spa.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// spaFS holds the built Vite SPA.
|
||||
// The directory backend/internal/api/webapp/ is populated by the Docker
|
||||
// multi-stage build (node → copy dist → go build).
|
||||
// A placeholder file keeps the embed valid when building without Docker.
|
||||
|
||||
//go:embed webapp
|
||||
var spaFS embed.FS
|
||||
|
||||
// SPAHandler serves the Vite SPA under the given prefix (e.g. "/app").
|
||||
// Static assets (paths with file extensions) are served directly.
|
||||
// All other paths fall back to index.html for client-side routing.
|
||||
func SPAHandler(prefix string) http.Handler {
|
||||
sub, err := fs.Sub(spaFS, "webapp")
|
||||
if err != nil {
|
||||
return http.NotFoundHandler()
|
||||
}
|
||||
fileServer := http.FileServer(http.FS(sub))
|
||||
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Strip the mount prefix to get the file path
|
||||
path := strings.TrimPrefix(r.URL.Path, prefix)
|
||||
if path == "" || path == "/" {
|
||||
// Serve index.html
|
||||
r2 := r.Clone(r.Context())
|
||||
r2.URL.Path = "/index.html"
|
||||
fileServer.ServeHTTP(w, r2)
|
||||
return
|
||||
}
|
||||
|
||||
// Has a file extension → serve asset directly (JS, CSS, fonts, …)
|
||||
if filepath.Ext(path) != "" {
|
||||
r2 := r.Clone(r.Context())
|
||||
r2.URL.Path = path
|
||||
fileServer.ServeHTTP(w, r2)
|
||||
return
|
||||
}
|
||||
|
||||
// SPA route → serve index.html
|
||||
r2 := r.Clone(r.Context())
|
||||
r2.URL.Path = "/index.html"
|
||||
fileServer.ServeHTTP(w, r2)
|
||||
})
|
||||
}
|
||||
103
backend/internal/api/static/autoplay.js
Normal file
103
backend/internal/api/static/autoplay.js
Normal file
@@ -0,0 +1,103 @@
|
||||
"use strict";
// Coordinates a page-wide background music player with inline <audio> and
// <video class="media-embed"> elements: videos autoplay (muted) while
// visible, and unmuting a video pauses the background track until the
// video is muted, paused, or ends. Compiled-from-TypeScript output; the
// `_a`/`void 0` patterns are downleveled optional chaining.
(function () {
    'use strict';
    /* ── Background player ───────────────────────────────────── */
    // Single shared Audio element; its source is swapped by sendToBg().
    const bgAudio = new Audio();
    // True while the background track is temporarily paused in favor of an
    // unmuted video, so it can be resumed afterwards.
    let bgPlaying = false;
    // Lazily-created fixed bar UI (see createBgBar); null until first use.
    let bgBar = null;
    let bgTitle = null;
    let bgPlayBtn = null;
    // createBgBar builds the fixed player bar (#bg-bar) once and wires its
    // play/pause and close buttons; subsequent calls are no-ops.
    function createBgBar() {
        var _a;
        if (bgBar)
            return;
        bgBar = document.createElement('div');
        bgBar.id = 'bg-bar';
        bgBar.innerHTML =
            '<span id="bg-title"></span>' +
            '<button id="bg-play" aria-label="Abspielen">▶</button>' +
            '<button id="bg-close" aria-label="Schließen">✕</button>';
        document.body.appendChild(bgBar);
        bgTitle = document.getElementById('bg-title');
        bgPlayBtn = document.getElementById('bg-play');
        // Toggle play/pause of the background track.
        bgPlayBtn.addEventListener('click', function () {
            if (bgAudio.paused)
                void bgAudio.play();
            else
                bgAudio.pause();
        });
        // Close: stop playback and hide the bar (it is reused later).
        (_a = document.getElementById('bg-close')) === null || _a === void 0 ? void 0 : _a.addEventListener('click', function () {
            bgAudio.pause();
            if (bgBar)
                bgBar.style.display = 'none';
        });
        // Keep the play button's glyph in sync with the audio state.
        bgAudio.addEventListener('play', function () { if (bgPlayBtn)
            bgPlayBtn.textContent = '⏸'; });
        bgAudio.addEventListener('pause', function () { if (bgPlayBtn)
            bgPlayBtn.textContent = '▶'; });
        bgAudio.addEventListener('ended', function () { if (bgPlayBtn)
            bgPlayBtn.textContent = '▶'; });
    }
    // sendToBg shows the bar and starts playing `src`, labeled with `title`.
    function sendToBg(src, title) {
        createBgBar();
        if (bgBar)
            bgBar.style.display = 'flex';
        bgAudio.src = src;
        if (bgTitle)
            bgTitle.textContent = title;
        void bgAudio.play();
    }
    // Attach "♪" button to every inline audio player
    document.querySelectorAll('audio.media-audio').forEach(function (a) {
        const btn = document.createElement('button');
        btn.className = 'btn-bg-music';
        btn.textContent = '♪ Hintergrundmusik';
        btn.type = 'button';
        // Fall back to the file name (or full src) when no title is set.
        const title = a.title || a.src.split('/').pop() || a.src;
        btn.addEventListener('click', function () { sendToBg(a.src, title); });
        a.insertAdjacentElement('afterend', btn);
    });
    /* ── Video autoplay + coordination ──────────────────────── */
    // Play videos while at least 30% visible; pause them otherwise.
    const obs = new IntersectionObserver(function (entries) {
        entries.forEach(function (e) {
            const v = e.target;
            if (e.isIntersecting) {
                void v.play();
            }
            else {
                v.pause();
            }
        });
    }, { threshold: 0.3 });
    document.querySelectorAll('video.media-embed').forEach(function (v) {
        // Muted + playsinline are required for mobile autoplay policies.
        v.muted = true;
        v.loop = true;
        v.setAttribute('playsinline', '');
        obs.observe(v);
        // User unmutes → pause background music
        v.addEventListener('volumechange', function () {
            if (!v.muted && !v.paused) {
                // Remember whether the background track was playing so it
                // can be resumed when the video yields again.
                bgPlaying = !bgAudio.paused;
                bgAudio.pause();
            }
            // Video muted again → resume background
            if (v.muted && bgPlaying) {
                void bgAudio.play();
                bgPlaying = false;
            }
        });
        // Video pauses or ends → resume background if it was playing
        v.addEventListener('pause', function () {
            if (bgPlaying) {
                void bgAudio.play();
                bgPlaying = false;
            }
        });
        v.addEventListener('ended', function () {
            if (bgPlaying) {
                void bgAudio.play();
                bgPlaying = false;
            }
        });
    });
})();
|
||||
28
backend/internal/api/static/day.js
Normal file
28
backend/internal/api/static/day.js
Normal file
@@ -0,0 +1,28 @@
|
||||
"use strict";
var _a;
// GPS button: fill the #entry-lat/#entry-lon inputs from the browser's
// geolocation API and report progress in #gps-status.
(_a = document.getElementById('btn-gps')) === null || _a === void 0 ? void 0 : _a.addEventListener('click', function () {
    const status = document.getElementById('gps-status');
    // Fix: the original dereferenced these elements without null checks and
    // would throw if the markup changed; guard every lookup instead.
    if (!status)
        return;
    if (!navigator.geolocation) {
        status.textContent = '// GPS nicht verfügbar';
        return;
    }
    status.textContent = '// Standort wird ermittelt...';
    navigator.geolocation.getCurrentPosition(function (pos) {
        const lat = document.getElementById('entry-lat');
        const lon = document.getElementById('entry-lon');
        if (lat)
            lat.value = pos.coords.latitude.toFixed(6);
        if (lon)
            lon.value = pos.coords.longitude.toFixed(6);
        status.textContent = '// Standort gesetzt (' + pos.coords.accuracy.toFixed(0) + ' m Genauigkeit)';
    }, function (err) {
        status.textContent = '// Fehler: ' + err.message;
    }, { enableHighAccuracy: true, timeout: 10000 });
});
// Pre-fill the #entry-time input with the current HH:MM if it is still empty.
(function () {
    const input = document.getElementById('entry-time');
    if (input && !input.value) {
        const now = new Date();
        const hh = String(now.getHours()).padStart(2, '0');
        const mm = String(now.getMinutes()).padStart(2, '0');
        input.value = hh + ':' + mm;
    }
})();
|
||||
84
backend/internal/api/static/editor.js
Normal file
84
backend/internal/api/static/editor.js
Normal file
@@ -0,0 +1,84 @@
|
||||
"use strict";
// Turns every <textarea name="description"> into a media-aware editor:
// pasted files, dropped files, and files chosen via a picker button are
// uploaded to POST /media and the returned reference string is inserted at
// the cursor. Compiled-from-TypeScript output.
(function () {
    'use strict';
    // initEditor wires paste/drag-drop/picker upload behavior onto one textarea.
    function initEditor(ta) {
        var _a, _b;
        // upload POSTs `file` as multipart form data to /media, shows progress
        // in the sibling .upload-status element, and on success inserts the
        // server-returned `ref` string at the cursor.
        async function upload(file) {
            var _a;
            const form = new FormData();
            form.append('file', file);
            const statusEl = (_a = ta.parentElement) === null || _a === void 0 ? void 0 : _a.querySelector('.upload-status');
            if (statusEl)
                statusEl.textContent = '↑ ' + file.name + ' …';
            try {
                const res = await fetch('/media', { method: 'POST', body: form });
                if (!res.ok) {
                    if (statusEl)
                        statusEl.textContent = '✗ Fehler beim Hochladen';
                    return;
                }
                const data = await res.json();
                // NOTE(review): assumes the /media response contains a `ref`
                // field with the markup to embed — confirm against the handler.
                insertAtCursor('\n' + data.ref + '\n');
                if (statusEl)
                    statusEl.textContent = '';
            }
            catch (_e) {
                // Network failure: best-effort status message, no rethrow.
                if (statusEl)
                    statusEl.textContent = '✗ Fehler beim Hochladen';
            }
        }
        // insertAtCursor replaces the current selection with `text` and moves
        // the caret to the end of the inserted text.
        function insertAtCursor(text) {
            const start = ta.selectionStart;
            ta.value = ta.value.slice(0, start) + text + ta.value.slice(ta.selectionEnd);
            ta.selectionStart = ta.selectionEnd = start + text.length;
            ta.focus();
        }
        // Paste: catch file pastes (only the first file item is uploaded).
        ta.addEventListener('paste', function (e) {
            var _a;
            const items = (_a = e.clipboardData) === null || _a === void 0 ? void 0 : _a.items;
            if (!items)
                return;
            for (let i = 0; i < items.length; i++) {
                if (items[i].kind === 'file') {
                    e.preventDefault();
                    const file = items[i].getAsFile();
                    if (file)
                        void upload(file);
                    return;
                }
            }
        });
        // Drag & Drop onto textarea; .drag-over toggles the visual highlight.
        ta.addEventListener('dragover', function (e) {
            e.preventDefault();
            ta.classList.add('drag-over');
        });
        ta.addEventListener('dragleave', function () {
            ta.classList.remove('drag-over');
        });
        ta.addEventListener('drop', function (e) {
            var _a;
            e.preventDefault();
            ta.classList.remove('drag-over');
            const files = (_a = e.dataTransfer) === null || _a === void 0 ? void 0 : _a.files;
            if (!files)
                return;
            // Unlike paste, a drop uploads every file.
            for (let i = 0; i < files.length; i++)
                void upload(files[i]);
        });
        // File picker button: .media-picker proxies a hidden .media-file-input.
        const picker = (_a = ta.parentElement) === null || _a === void 0 ? void 0 : _a.querySelector('.media-picker');
        const input = (_b = ta.parentElement) === null || _b === void 0 ? void 0 : _b.querySelector('.media-file-input');
        if (picker && input) {
            picker.addEventListener('click', function () { input.click(); });
            input.addEventListener('change', function () {
                if (!input.files)
                    return;
                Array.from(input.files).forEach(f => void upload(f));
                // Reset so selecting the same file again re-triggers 'change'.
                input.value = '';
            });
        }
    }
    document.querySelectorAll('textarea[name="description"]').forEach(initEditor);
})();
|
||||
105
backend/internal/api/static/style.css
Normal file
105
backend/internal/api/static/style.css
Normal file
@@ -0,0 +1,105 @@
|
||||
/* Application stylesheet: monospace/monochrome theme layered on top of
   Pico CSS (the --pico-* custom properties override its defaults). */

/* Font + monochrome override */
:root {
  --pico-font-family: 'Courier New', Courier, monospace;
  --pico-font-size: 14px;
  --pico-primary: #111;
  --pico-primary-background: #111;
  --pico-primary-border: #111;
  --pico-primary-hover: #333;
  --pico-primary-hover-background: #333;
  --pico-primary-hover-border: #333;
  --pico-primary-focus: rgba(0,0,0,.25);
  --pico-primary-inverse: #fff;
  --pico-primary-underline: rgba(0,0,0,.5);
}

/* Site nav */
.site-nav { display: flex; gap: 1.5rem; align-items: baseline; padding: .7rem 0; margin-bottom: 1.5rem; border-bottom: 1px solid var(--pico-muted-border-color); }
.site-nav a { font-size: .85rem; text-decoration: none; }
.site-nav a:hover { text-decoration: underline; }
/* .nav-btn makes a <button> (e.g. logout form) look like a nav link. */
.nav-btn { background: none; border: none; color: var(--pico-primary); padding: 0; cursor: pointer; font-family: inherit; font-size: .85rem; margin: 0; }
.site-nav form { margin: 0; padding: 0; }

h1 { font-size: 1.4rem; font-weight: normal; letter-spacing: .05em; }
h2 { font-size: 1rem; font-weight: normal; letter-spacing: .05em; }

/* Status colors: errors red, GPS-sourced entries green, manual gray. */
.err, .error { color: #c00; }
.source-gps { color: #060; }
.source-manual { color: #888; }

/* GPS row */
.gps-row { display: flex; gap: .4rem; align-items: center; }
.gps-row input { flex: 1; margin-bottom: 0; }
.gps-row button { white-space: nowrap; margin-bottom: 0; }

/* Narrow pages (login, register) */
.narrow { max-width: 400px; margin-top: 4rem; }

.thumb { width: 80px; height: 80px; object-fit: cover; border: 1px solid var(--pico-muted-border-color); display: block; }

/* Journal entry cards */
.entry-card {
  border-left: 3px solid var(--pico-primary);
  padding: .6rem 1rem;
  margin-bottom: 1rem;
  background: var(--pico-card-background-color);
  border-radius: 0 var(--pico-border-radius) var(--pico-border-radius) 0;
}
.entry-meta { font-size: .8rem; margin-bottom: .3rem; display: flex; gap: .6rem; align-items: baseline; flex-wrap: wrap; }
.entry-edit { margin-left: auto; font-size: .75rem; }
.entry-title { font-size: 1rem; margin-bottom: .3rem; }
.entry-desc { font-size: .9rem; }
.entry-desc p { margin-bottom: .5rem; }
.entry-desc img { max-width: 100%; height: auto; display: block; margin: .5rem 0; }
.entry-desc video { max-width: 100%; display: block; margin: .5rem 0; }
.entry-desc ul,
.entry-desc ol { padding-left: 1.2rem; margin-bottom: .5rem; }
.entry-desc h1, .entry-desc h2, .entry-desc h3 { font-weight: normal; margin: .8rem 0 .3rem; }
.entry-images { display: flex; flex-wrap: wrap; gap: .5rem; margin-top: .5rem; }
.media-embed { width: 100%; max-height: 360px; display: block; margin-top: .5rem; }
.media-audio { width: 100%; display: block; margin-top: .5rem; }

/* Public feed */
.pub-card { margin-bottom: 2rem; padding-bottom: 1.5rem; border-bottom: 1px solid var(--pico-muted-border-color); }
.pub-card:last-of-type { border-bottom: none; }
.pub-cover { width: 100%; max-height: 320px; object-fit: cover; display: block; margin-bottom: .7rem; }
.pub-meta { display: block; color: var(--pico-muted-color); margin-bottom: .3rem; }
.pub-title { display: block; font-size: 1rem; margin-bottom: .4rem; }
.pub-desc { margin: 0 0 .4rem; font-size: .9rem; }
.pub-desc p { margin-bottom: .5rem; }
.pub-desc img { max-width: 100%; height: auto; display: block; margin: .5rem 0; }
.pub-desc video { max-width: 100%; display: block; margin: .5rem 0; }
.pub-tags { margin-top: .3rem; }

/* Login */
.login-box { max-width: 360px; margin: 4rem auto; }

/* Hashtags */
.hashtags { margin-top: .3rem; }
.tag { font-size: .75rem; background: var(--pico-muted-background-color); padding: .1rem .4rem; border-radius: 999px; margin-right: .2rem; }

/* Visibility badge */
.badge-public { font-size: .7rem; background: #264; color: #8f8; padding: .1rem .4rem; border-radius: 4px; vertical-align: middle; }

/* Background music player (fixed bar injected by autoplay.js). */
#bg-bar { display: none; position: fixed; bottom: 0; left: 0; right: 0; background: var(--pico-background-color); border-top: 1px solid var(--pico-muted-border-color); padding: .4rem 1rem; gap: .8rem; align-items: center; z-index: 100; font-size: .8rem; }
#bg-title { flex: 1; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; color: var(--pico-muted-color); }
#bg-play, #bg-close { background: none; border: none; cursor: pointer; padding: 0 .3rem; font-size: .9rem; margin: 0; }
.btn-bg-music { font-size: .75rem; padding: .15rem .5rem; background: none; border: 1px solid var(--pico-muted-border-color); cursor: pointer; margin-top: .3rem; display: block; }

/* Editor (textarea + toolbar wired by editor.js). */
.editor-wrap textarea { margin-bottom: 0; border-bottom: none; border-radius: var(--pico-border-radius) var(--pico-border-radius) 0 0; }
.editor-wrap textarea.drag-over { outline: 2px dashed var(--pico-primary); }
.editor-bar { display: flex; align-items: center; gap: .6rem; padding: .3rem .5rem; border: 1px solid var(--pico-form-element-border-color); border-top: none; border-radius: 0 0 var(--pico-border-radius) var(--pico-border-radius); margin-bottom: 1rem; background: var(--pico-form-element-background-color); }
.editor-bar button { font-size: .78rem; padding: .15rem .5rem; background: none; border: 1px solid var(--pico-muted-border-color); cursor: pointer; margin: 0; }
.upload-status { font-size: .78rem; color: var(--pico-muted-color); }

/* Media reference rows (edit form) */
.media-refs { margin-bottom: 1rem; display: flex; flex-direction: column; gap: .5rem; }
.media-ref-row { display: flex; align-items: center; gap: .6rem; flex-wrap: wrap; }
.media-ref-code { font-size: .75rem; background: var(--pico-muted-background-color); padding: .2rem .4rem; flex: 1; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
.btn-insert { font-size: .75rem; padding: .2rem .5rem; background: none; border: 1px solid var(--pico-muted-border-color); cursor: pointer; white-space: nowrap; }

/* Delete button */
.btn-delete { background: none; border: 1px solid #c44; color: #c44; padding: .2rem .6rem; cursor: pointer; font-size: .8rem; border-radius: 4px; }
.btn-delete:hover { background: #c44; color: #fff; }
||||
33
backend/internal/api/templates/admin/entries.html
Normal file
33
backend/internal/api/templates/admin/entries.html
Normal file
@@ -0,0 +1,33 @@
|
||||
{{/* Admin entry list: renders every journal entry as a table row with
     date, time, title, visibility badge, and hashtags. Expects .Entries
     and a "join" template func registered on the Go side. Rendered inside
     the admin_base layout. */}}
{{define "admin_title"}}Einträge verwalten — Admin{{end}}

{{define "admin_content"}}
<h1>Einträge</h1>
<p><a href="/days">→ Neuen Eintrag anlegen (Tagesansicht)</a></p>

{{if .Entries}}
<figure>
  <table>
    <thead><tr><th>Datum</th><th>Zeit</th><th>Titel</th><th>Sichtbarkeit</th><th>Hashtags</th></tr></thead>
    <tbody>
      {{range .Entries}}
      <tr>
        <td><a href="/days/{{.EntryDate}}">{{.EntryDate}}</a></td>
        <td>{{.EntryTime}}</td>
        <td>{{if .Title}}{{.Title}}{{else}}<small>—</small>{{end}}</td>
        <td>
          {{if eq .Visibility "public"}}
          <span class="badge-public">öffentlich</span>
          {{else}}
          <small>privat</small>
          {{end}}
        </td>
        <td><small>{{join .Hashtags ", "}}</small></td>
      </tr>
      {{end}}
    </tbody>
  </table>
</figure>
{{else}}
<p><small>// Noch keine Einträge</small></p>
{{end}}
{{end}}
|
||||
25
backend/internal/api/templates/admin/layout.html
Normal file
25
backend/internal/api/templates/admin/layout.html
Normal file
@@ -0,0 +1,25 @@
|
||||
{{/* Admin base layout: pages fill admin_title / admin_content / admin_scripts blocks.
     Rendered via renderAdmin, which parses layout + page fresh per request. */}}
{{define "admin_base"}}<!DOCTYPE html>
<html lang="de">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>{{block "admin_title" .}}Admin{{end}}</title>
  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.classless.slate.min.css">
  <link rel="stylesheet" href="/static/style.css">
</head>
<body>
<main class="container">
  <nav>
    <strong>Admin</strong>
    <span>
      <a href="/admin/entries">Einträge</a> ·
      <a href="/admin/users">Benutzer</a> ·
      <a href="/days">← App</a>
    </span>
  </nav>
  {{block "admin_content" .}}{{end}}
</main>
{{block "admin_scripts" .}}{{end}}
</body>
</html>
{{end}}
|
||||
53
backend/internal/api/templates/admin/users.html
Normal file
53
backend/internal/api/templates/admin/users.html
Normal file
@@ -0,0 +1,53 @@
|
||||
{{/* Admin user management: create form + user table with per-row delete.
     The current admin's own row shows "(du)" instead of a delete button. */}}
{{define "admin_title"}}Benutzer verwalten — Admin{{end}}

{{define "admin_content"}}
<h1>Benutzer</h1>

{{if .Error}}<p class="error">{{.Error}}</p>{{end}}

<form method="post" action="/admin/users" style="display:flex;gap:1rem;align-items:flex-end;flex-wrap:wrap">
  <div>
    <label>Benutzername</label>
    <input type="text" name="username" required autocomplete="off">
  </div>
  <div>
    <label>Passwort</label>
    <input type="password" name="password" required autocomplete="new-password">
  </div>
  <button type="submit">Anlegen</button>
</form>

<figure>
  <table>
    <thead><tr><th>Benutzername</th><th>Admin</th><th>Erstellt</th><th></th></tr></thead>
    <tbody>
      {{range .Users}}
      <tr>
        <td>{{.Username}}</td>
        <td>{{if .IsAdmin}}✓{{end}}</td>
        <td><small>{{.CreatedAt.Format "2006-01-02"}}</small></td>
        <td>
          {{if ne .UserID $.User.UserID}}
          <button class="btn-delete" data-url="/admin/users/{{.UserID}}" data-name="{{.Username}}">Löschen</button>
          {{else}}
          <small>(du)</small>
          {{end}}
        </td>
      </tr>
      {{end}}
    </tbody>
  </table>
</figure>
{{end}}

{{define "admin_scripts"}}
<script>
// Confirm, then DELETE the user and reload the page.
// NOTE(review): reloads even when the DELETE failed — no response.ok check,
// so a failed delete silently looks like "nothing happened".
document.querySelectorAll('.btn-delete').forEach(function(btn) {
  btn.addEventListener('click', function() {
    if (!confirm('Benutzer "' + btn.dataset.name + '" löschen?')) return;
    fetch(btn.dataset.url, {method: 'DELETE'})
      .then(function() { window.location.reload(); });
  });
});
</script>
{{end}}
|
||||
26
backend/internal/api/templates/base.html
Normal file
26
backend/internal/api/templates/base.html
Normal file
@@ -0,0 +1,26 @@
|
||||
{{/* Site-wide base layout; pages fill title / content / scripts blocks.
     Nav links depend on .LoggedIn / .IsAdmin injected by injectNav. */}}
{{define "base"}}<!DOCTYPE html>
<html lang="de">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>{{block "title" .}}Reisejournal{{end}}</title>
  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.classless.slate.min.css">
  <link rel="stylesheet" href="/static/style.css">
</head>
<body>
<nav class="site-nav container">
  <a href="/">Journal</a>
  {{if .LoggedIn}}
  <a href="/days">Tage</a>
  {{if .IsAdmin}}<a href="/admin">Admin</a>{{end}}
  <form method="post" action="/logout"><button type="submit" class="nav-btn">Ausloggen</button></form>
  {{else}}
  <a href="/login">Anmelden</a>
  {{end}}
</nav>
{{block "content" .}}{{end}}
{{block "scripts" .}}{{end}}
<script src="/static/autoplay.js"></script>
</body>
</html>
{{end}}
|
||||
113
backend/internal/api/templates/day.html
Normal file
113
backend/internal/api/templates/day.html
Normal file
@@ -0,0 +1,113 @@
|
||||
{{/* Single-day view: new-entry form, existing entries with media,
     detected stops, and a collapsible raw trackpoint table. */}}
{{define "title"}}{{.Date}} — Reisejournal{{end}}

{{define "content"}}
<main class="container">
  <nav><a href="/days">← Alle Tage</a></nav>
  <h1>{{.Date}}</h1>

  {{/* New-entry form; media uploads are handled client-side by editor.js. */}}
  <form method="post" action="/entries" enctype="multipart/form-data">
    <input type="hidden" name="date" value="{{.Date}}">
    <div class="gps-row">
      <input type="time" name="time" required id="entry-time">
      <select name="visibility">
        <option value="private">Privat</option>
        <option value="public">Öffentlich</option>
      </select>
    </div>
    <input type="text" name="title" placeholder="Überschrift">
    <div class="editor-wrap">
      <textarea name="description" rows="6" placeholder="Beschreibung — Markdown unterstützt Medien: per Drag & Drop oder Einfügen (Strg+V)"></textarea>
      <div class="editor-bar">
        <button type="button" class="media-picker">📎 Datei anhängen</button>
        <span class="upload-status"></span>
        <input type="file" class="media-file-input" multiple accept="image/*,video/*,audio/*" style="display:none">
      </div>
    </div>
    {{/* Optional coordinates; the GPS button fills them via day.js. */}}
    <div class="gps-row">
      <input type="number" name="lat" id="entry-lat" step="any" placeholder="Breite">
      <input type="number" name="lon" id="entry-lon" step="any" placeholder="Länge">
      <button type="button" id="btn-gps">◎ GPS</button>
    </div>
    <small id="gps-status"></small>
    <input type="text" name="hashtags" placeholder="Hashtags (kommagetrennt)">
    <button type="submit">Speichern</button>
  </form>

  <h2>Einträge <small>({{len .Entries}})</small></h2>
  {{range .Entries}}
  <div class="entry-card">
    <div class="entry-meta">
      <strong>{{.EntryTime}}</strong>
      {{if eq .Visibility "public"}}<span class="badge-public">öffentlich</span>{{end}}
      {{if .Lat}}<small> · ○ {{printf "%.5f" (deref .Lat)}}, {{printf "%.5f" (deref .Lon)}}</small>{{end}}
      <a href="/entries/{{.EntryID}}/edit" class="entry-edit">bearbeiten</a>
    </div>
    {{if .Title}}<div class="entry-title">{{.Title}}</div>{{end}}
    {{if .Description}}<div class="entry-desc">{{markdown .Description}}</div>{{end}}
    {{if .Hashtags}}<div class="hashtags">{{range .Hashtags}}<span class="tag">#{{.}}</span> {{end}}</div>{{end}}
    {{/* "Images" also carries video/audio attachments; pick the element by MIME. */}}
    {{if .Images}}
    <div class="entry-images">
      {{range .Images}}
      {{if isVideo .MimeType}}
      <video src="/uploads/{{.Filename}}" controls class="media-embed"></video>
      {{else if isAudio .MimeType}}
      <audio src="/uploads/{{.Filename}}" controls class="media-audio"></audio>
      {{else}}
      <a href="/uploads/{{.Filename}}" target="_blank">
        <img src="/uploads/{{.Filename}}" alt="{{.OriginalName}}" class="thumb">
      </a>
      {{end}}
      {{end}}
    </div>
    {{end}}
  </div>
  {{else}}
  <p><small>// Noch keine Einträge</small></p>
  {{end}}

  <h2>Aufenthalte <small>({{len .Stops}})</small></h2>
  {{if .Stops}}
  <table>
    <thead><tr><th>Von</th><th>Bis</th><th>Dauer</th><th>Ort</th></tr></thead>
    <tbody>
      {{range .Stops}}
      <tr>
        <td>{{.StartTS.Format "15:04"}}</td>
        <td>{{.EndTS.Format "15:04"}}</td>
        <td><small>{{divInt .DurationS 60}} min</small></td>
        <td>{{if .PlaceLabel}}{{.PlaceLabel}}{{else}}<small>—</small>{{end}}</td>
      </tr>
      {{end}}
    </tbody>
  </table>
  {{else}}
  <p><small>// Keine Aufenthalte</small></p>
  {{end}}

  <details>
    <summary><small>Trackpunkte ({{len .Points}})</small></summary>
    <table>
      <thead><tr><th>Zeit</th><th>Lat</th><th>Lon</th><th>Quelle</th></tr></thead>
      <tbody>
        {{range .Points}}
        <tr>
          <td>{{.Timestamp.Format "15:04:05"}}</td>
          <td>{{printf "%.5f" .Lat}}</td>
          <td>{{printf "%.5f" .Lon}}</td>
          <td class="source-{{.Source}}">{{.Source}}</td>
        </tr>
        {{else}}
        <tr><td colspan="4"><small>// Keine Punkte</small></td></tr>
        {{end}}
      </tbody>
    </table>
  </details>
</main>
{{end}}

{{define "scripts"}}
<script src="/static/day.js"></script>
<script src="/static/editor.js"></script>
{{end}}

{{template "base" .}}
|
||||
37
backend/internal/api/templates/days.html
Normal file
37
backend/internal/api/templates/days.html
Normal file
@@ -0,0 +1,37 @@
|
||||
{{/* Day overview: quick date-jump form plus a per-day trackpoint summary table. */}}
{{define "title"}}Tage — Reisejournal{{end}}

{{define "content"}}
<main class="container">
  <form method="get" action="/days/redirect" class="gps-row">
    <input type="date" name="date" id="nav-date" required>
    <button type="submit">Tag öffnen</button>
  </form>

  <table>
    <thead><tr><th>Datum</th><th>Punkte</th><th>Von</th><th>Bis</th></tr></thead>
    <tbody>
      {{range .Days}}
      <tr>
        <td><a href="/days/{{.Date}}">{{.Date}}</a></td>
        <td>{{.Count}}</td>
        <td><small>{{if .FirstTS}}{{.FirstTS.Format "15:04"}}{{end}}</small></td>
        <td><small>{{if .LastTS}}{{.LastTS.Format "15:04"}}{{end}}</small></td>
      </tr>
      {{else}}
      <tr><td colspan="4"><small>// Keine Daten vorhanden</small></td></tr>
      {{end}}
    </tbody>
  </table>
</main>
{{end}}

{{define "scripts"}}
<script>
// Pre-fill the date picker with today's date (browser-local ISO date).
var d = document.getElementById('nav-date');
if (d && !d.value) {
  d.value = new Date().toISOString().slice(0, 10);
}
</script>
{{end}}

{{template "base" .}}
|
||||
59
backend/internal/api/templates/edit_entry.html
Normal file
59
backend/internal/api/templates/edit_entry.html
Normal file
@@ -0,0 +1,59 @@
|
||||
{{/* Edit form for an existing entry, pre-filled from .Entry.
     Attached media are listed with their markdown references and an
     "insert" button that pastes the reference into the textarea (editor.js). */}}
{{define "title"}}Eintrag bearbeiten{{end}}

{{define "content"}}
<main class="container">
  <nav><a href="/days/{{.Entry.EntryDate}}">← {{.Entry.EntryDate}}</a></nav>
  <h1>Eintrag bearbeiten</h1>

  <form method="post" action="/entries/{{.Entry.EntryID}}" enctype="multipart/form-data">
    <div class="gps-row">
      <input type="time" name="time" required value="{{.Entry.EntryTime}}">
      <select name="visibility">
        <option value="private"{{if eq .Entry.Visibility "private"}} selected{{end}}>Privat</option>
        <option value="public"{{if eq .Entry.Visibility "public"}} selected{{end}}>Öffentlich</option>
      </select>
    </div>
    <input type="text" name="title" placeholder="Überschrift" value="{{.Entry.Title}}">
    <div class="editor-wrap">
      <textarea name="description" rows="6" placeholder="Beschreibung — Markdown unterstützt Medien: per Drag & Drop oder Einfügen (Strg+V)">{{.Entry.Description}}</textarea>
      <div class="editor-bar">
        <button type="button" class="media-picker">📎 Datei anhängen</button>
        <span class="upload-status"></span>
        <input type="file" class="media-file-input" multiple accept="image/*,video/*,audio/*" style="display:none">
      </div>
    </div>
    <div class="gps-row">
      <input type="number" name="lat" id="entry-lat" step="any" placeholder="Breite"{{if .Entry.Lat}} value="{{printf "%.6f" (deref .Entry.Lat)}}"{{end}}>
      <input type="number" name="lon" id="entry-lon" step="any" placeholder="Länge"{{if .Entry.Lon}} value="{{printf "%.6f" (deref .Entry.Lon)}}"{{end}}>
      <button type="button" id="btn-gps">◎ GPS</button>
    </div>
    <small id="gps-status"></small>
    <input type="text" name="hashtags" placeholder="Hashtags (kommagetrennt)" value="{{join .Entry.Hashtags ", "}}">
    {{if .Entry.Images}}
    <div class="media-refs">
      {{range .Entry.Images}}
      <div class="media-ref-row">
        {{/* NOTE(review): the reference text for video and image attachments is
             empty below (empty <code> and empty data-ref branches), while audio
             gets a markdown link. This looks like lost content — image refs were
             presumably ![{{`{{.OriginalName}}`}}](...) — TODO confirm against VCS. */}}
        {{if isVideo .MimeType}}
        <code class="media-ref-code"></code>
        {{else if isAudio .MimeType}}
        <code class="media-ref-code">[{{.OriginalName}}](/uploads/{{.Filename}})</code>
        {{else}}
        <img src="/uploads/{{.Filename}}" alt="{{.OriginalName}}" class="thumb">
        <code class="media-ref-code"></code>
        {{end}}
        <button type="button" class="btn-insert" data-ref="{{if isVideo .MimeType}}{{else if isAudio .MimeType}}[{{.OriginalName}}](/uploads/{{.Filename}}){{else}}{{end}}">↩ einfügen</button>
      </div>
      {{end}}
    </div>
    {{end}}
    <button type="submit">Speichern</button>
  </form>
</main>
{{end}}

{{define "scripts"}}
<script src="/static/day.js"></script>
<script src="/static/editor.js"></script>
{{end}}

{{template "base" .}}
|
||||
21
backend/internal/api/templates/login.html
Normal file
21
backend/internal/api/templates/login.html
Normal file
@@ -0,0 +1,21 @@
|
||||
{{/* Login form; .Error and .Username are re-rendered after a failed attempt. */}}
{{define "title"}}Login — Reisejournal{{end}}

{{define "content"}}
<main class="container">
  <article class="login-box">
    <h1>REISEJOURNAL</h1>
    {{if .Error}}<p class="err">// {{.Error}}</p>{{end}}
    <form method="post" action="/login">
      <label>Benutzername
        <input name="username" autocomplete="username" value="{{.Username}}">
      </label>
      <label>Passwort
        <input type="password" name="password" autocomplete="current-password">
      </label>
      <button type="submit">Einloggen</button>
    </form>
  </article>
</main>
{{end}}

{{template "base" .}}
|
||||
66
backend/internal/api/templates/public.html
Normal file
66
backend/internal/api/templates/public.html
Normal file
@@ -0,0 +1,66 @@
|
||||
{{/* Public feed with infinite scroll: feed_items is also rendered standalone
     by HandleFeedFragment; the #sentinel div carries the next offset. */}}
{{define "title"}}Journal{{end}}

{{define "content"}}
<main class="container">
  <div id="feed">
    {{template "feed_items" .}}
  </div>
</main>
{{end}}

{{define "feed_items"}}
{{range .Entries}}
<article class="pub-card">
  {{/* Use the first attachment as the cover; element depends on its MIME type. */}}
  {{if .Images}}
  {{with (index .Images 0)}}
  {{if isVideo .MimeType}}
  <video src="/uploads/{{.Filename}}" controls class="media-embed"></video>
  {{else if isAudio .MimeType}}
  <audio src="/uploads/{{.Filename}}" controls class="media-audio"></audio>
  {{else}}
  <a href="/uploads/{{.Filename}}" target="_blank">
    <img class="pub-cover" src="/uploads/{{.Filename}}" alt="">
  </a>
  {{end}}
  {{end}}
  {{end}}
  <div class="pub-body">
    <small class="pub-meta">{{.EntryDate}} · {{.EntryTime}}</small>
    {{if .Title}}<strong class="pub-title">{{.Title}}</strong>{{end}}
    {{if .Description}}<div class="pub-desc">{{markdown .Description}}</div>{{end}}
    {{if .Hashtags}}<div class="pub-tags">{{range .Hashtags}}<span class="tag">#{{.}}</span> {{end}}</div>{{end}}
  </div>
</article>
{{else}}
<p><small>// Noch keine öffentlichen Einträge</small></p>
{{end}}
{{if .HasMore}}
<div id="sentinel" data-offset="{{.Offset}}"></div>
{{end}}
{{end}}

{{define "scripts"}}
<script>
// Infinite scroll: when the sentinel becomes visible, fetch the next page
// fragment, append it, and re-observe the new sentinel (if any).
// NOTE(review): no error handling on fetch — a failed page load silently
// stops the feed.
(function() {
  const sentinel = document.getElementById('sentinel');
  if (!sentinel) return;
  const obs = new IntersectionObserver(function(entries) {
    if (!entries[0].isIntersecting) return;
    obs.disconnect();
    fetch('/feed?offset=' + sentinel.dataset.offset)
      .then(r => r.text())
      .then(html => {
        sentinel.remove();
        const div = document.createElement('div');
        div.innerHTML = html;
        document.getElementById('feed').append(...div.childNodes);
        const next = document.getElementById('sentinel');
        if (next) obs.observe(next);
      });
  });
  obs.observe(sentinel);
})();
</script>
{{end}}

{{template "base" .}}
|
||||
20
backend/internal/api/templates/register.html
Normal file
20
backend/internal/api/templates/register.html
Normal file
@@ -0,0 +1,20 @@
|
||||
{{/* Registration form; password confirmation is validated server-side. */}}
{{define "title"}}Registrieren — Journal{{end}}

{{define "content"}}
<main class="container narrow">
  <h1>Konto erstellen</h1>
  {{if .Error}}<p class="error">{{.Error}}</p>{{end}}
  <form method="post" action="/register">
    <label>Benutzername</label>
    <input type="text" name="username" value="{{.Username}}" required autofocus autocomplete="username">
    <label>Passwort</label>
    <input type="password" name="password" required autocomplete="new-password">
    <label>Passwort bestätigen</label>
    <input type="password" name="confirm" required autocomplete="new-password">
    <button type="submit">Registrieren</button>
  </form>
  <p><small>Bereits registriert? <a href="/login">Anmelden</a></small></p>
</main>
{{end}}

{{template "base" .}}
|
||||
125
backend/internal/api/upload.go
Normal file
125
backend/internal/api/upload.go
Normal file
@@ -0,0 +1,125 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image"
|
||||
"image/jpeg"
|
||||
_ "image/jpeg"
|
||||
"image/png"
|
||||
_ "image/png"
|
||||
"io"
|
||||
"log/slog"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/disintegration/imaging"
|
||||
"golang.org/x/image/webp"
|
||||
)
|
||||
|
||||
const maxImageDimension = 1920
|
||||
const jpegQuality = 80
|
||||
|
||||
// saveUpload writes an uploaded file to uploadDir, resizing images where applicable.
|
||||
// peeked contains the first bytes already read for MIME detection.
|
||||
// Returns the saved filename (including extension).
|
||||
func saveUpload(uploadDir, baseName, mime string, peeked []byte, rest io.Reader) (string, error) {
|
||||
full := io.MultiReader(bytes.NewReader(peeked), rest)
|
||||
switch mime {
|
||||
case "image/jpeg", "image/png":
|
||||
return saveResizedImage(uploadDir, baseName, mime, full)
|
||||
case "image/webp":
|
||||
return saveResizedWebP(uploadDir, baseName, full)
|
||||
default:
|
||||
ext := allowedMIME[mime]
|
||||
dest := filepath.Join(uploadDir, baseName+ext)
|
||||
return baseName + ext, saveRaw(dest, full)
|
||||
}
|
||||
}
|
||||
|
||||
func saveResizedImage(uploadDir, baseName, mime string, r io.Reader) (string, error) {
|
||||
img, _, err := image.Decode(r)
|
||||
if err != nil {
|
||||
// Fallback: save raw if decode fails
|
||||
slog.Warn("image decode failed, saving raw", "mime", mime, "err", err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
b := img.Bounds()
|
||||
if b.Dx() > maxImageDimension || b.Dy() > maxImageDimension {
|
||||
img = imaging.Fit(img, maxImageDimension, maxImageDimension, imaging.Lanczos)
|
||||
}
|
||||
|
||||
ext := allowedMIME[mime]
|
||||
filename := baseName + ext
|
||||
dest := filepath.Join(uploadDir, filename)
|
||||
out, err := os.Create(dest)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
switch mime {
|
||||
case "image/jpeg":
|
||||
err = jpeg.Encode(out, img, &jpeg.Options{Quality: jpegQuality})
|
||||
case "image/png":
|
||||
err = png.Encode(out, img)
|
||||
}
|
||||
if err != nil {
|
||||
os.Remove(dest)
|
||||
return "", err
|
||||
}
|
||||
return filename, nil
|
||||
}
|
||||
|
||||
func saveResizedWebP(uploadDir, baseName string, r io.Reader) (string, error) {
|
||||
img, err := webp.Decode(r)
|
||||
if err != nil {
|
||||
slog.Warn("webp decode failed, saving raw", "err", err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
b := img.Bounds()
|
||||
if b.Dx() > maxImageDimension || b.Dy() > maxImageDimension {
|
||||
img = imaging.Fit(img, maxImageDimension, maxImageDimension, imaging.Lanczos)
|
||||
}
|
||||
|
||||
// Re-encode as JPEG (no pure-Go WebP encoder available)
|
||||
filename := baseName + ".jpg"
|
||||
dest := filepath.Join(uploadDir, filename)
|
||||
out, err := os.Create(dest)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
if err := jpeg.Encode(out, img, &jpeg.Options{Quality: jpegQuality}); err != nil {
|
||||
os.Remove(dest)
|
||||
return "", err
|
||||
}
|
||||
return filename, nil
|
||||
}
|
||||
|
||||
func saveRaw(dest string, r io.Reader) error {
|
||||
out, err := os.Create(dest)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
_, err = io.Copy(out, r)
|
||||
return err
|
||||
}
|
||||
|
||||
// mimeCategory returns "image", "video", "audio" or "other".
// Classification is purely by MIME-type prefix; anything unrecognized
// (including the empty string) falls into "other".
func mimeCategory(mime string) string {
	for _, prefix := range []string{"image/", "video/", "audio/"} {
		if strings.HasPrefix(mime, prefix) {
			return strings.TrimSuffix(prefix, "/")
		}
	}
	return "other"
}
|
||||
1
backend/internal/api/webapp/index.html
Normal file
1
backend/internal/api/webapp/index.html
Normal file
@@ -0,0 +1 @@
|
||||
<!-- webapp placeholder: replaced at build time by Vite dist -->
|
||||
392
backend/internal/api/webui.go
Normal file
392
backend/internal/api/webui.go
Normal file
@@ -0,0 +1,392 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"embed"
|
||||
"errors"
|
||||
"html/template"
|
||||
"io/fs"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/yuin/goldmark"
|
||||
"github.com/yuin/goldmark/extension"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/renderer/html"
|
||||
"github.com/jacek/pamietnik/backend/internal/auth"
|
||||
"github.com/jacek/pamietnik/backend/internal/db"
|
||||
)
|
||||
|
||||
// md is the shared goldmark converter: GitHub-Flavored Markdown,
// auto-generated heading IDs, and hard line breaks (single newline → <br>).
var md = goldmark.New(
	goldmark.WithExtensions(extension.GFM),
	goldmark.WithParserOptions(parser.WithAutoHeadingID()),
	goldmark.WithRendererOptions(html.WithHardWraps()),
)

// renderMarkdown converts Markdown source to HTML for template embedding.
// On conversion failure it degrades to the HTML-escaped raw source instead
// of failing the whole page render.
// NOTE(review): the result is marked template.HTML (not re-escaped) —
// confirm entry authors are trusted, or add an HTML sanitizer.
func renderMarkdown(src string) template.HTML {
	var buf bytes.Buffer
	if err := md.Convert([]byte(src), &buf); err != nil {
		return template.HTML(template.HTMLEscapeString(src))
	}
	return template.HTML(buf.String())
}
|
||||
|
||||
//go:embed static templates
var assets embed.FS

// funcMap holds the helper functions available to all templates.
var funcMap = template.FuncMap{
	// divInt: integer division (e.g. seconds → whole minutes in day.html).
	"divInt": func(a, b int) int { return a / b },
	// deref: safe dereference of an optional coordinate; nil maps to 0.
	"deref": func(p *float64) float64 {
		if p == nil {
			return 0
		}
		return *p
	},
	"join": strings.Join,
	// MIME-prefix checks used to pick the right media element per attachment.
	"isVideo": func(mime string) bool { return strings.HasPrefix(mime, "video/") },
	"isAudio": func(mime string) bool { return strings.HasPrefix(mime, "audio/") },
	"isImage": func(mime string) bool { return strings.HasPrefix(mime, "image/") },
	// markdown: renders entry text to HTML (see renderMarkdown).
	"markdown": renderMarkdown,
}

// tmpls is the base template set parsed once at startup; request handlers
// Clone() it and parse the page-specific file on top (see render).
var tmpls = template.Must(
	template.New("").Funcs(funcMap).ParseFS(assets, "templates/*.html"),
)
|
||||
|
||||
func staticFS() fs.FS {
|
||||
sub, err := fs.Sub(assets, "static")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return sub
|
||||
}
|
||||
|
||||
// WebUI groups all web UI handlers and their storage dependencies.
type WebUI struct {
	authStore    *auth.Store         // sessions, login/logout, registration
	tpStore      *db.TrackpointStore // GPS trackpoints
	stopStore    *db.StopStore       // detected stays
	journalStore *db.JournalStore    // journal entries and media
	userStore    *db.UserStore       // user administration
}

// NewWebUI wires the handler group to its stores.
func NewWebUI(a *auth.Store, tp *db.TrackpointStore, st *db.StopStore, j *db.JournalStore, u *db.UserStore) *WebUI {
	return &WebUI{authStore: a, tpStore: tp, stopStore: st, journalStore: j, userStore: u}
}
|
||||
|
||||
func injectNav(r *http.Request, data map[string]any) {
|
||||
user := userFromContext(r.Context())
|
||||
if user.UserID != "" {
|
||||
data["LoggedIn"] = true
|
||||
data["IsAdmin"] = user.IsAdmin
|
||||
}
|
||||
}
|
||||
|
||||
// render executes the "base" template with the given page parsed on top of
// a clone of the shared set, writing the response only after execution
// succeeds so a template error never produces a half-written page.
// NOTE(review): the page file is re-parsed on every request; if this shows
// up in profiles, per-page sets could be parsed once at startup.
func render(w http.ResponseWriter, r *http.Request, page string, data map[string]any) {
	injectNav(r, data)
	// Clone keeps per-request page templates from leaking into the shared set.
	t, err := tmpls.Clone()
	if err == nil {
		_, err = t.ParseFS(assets, "templates/"+page)
	}
	if err != nil {
		slog.Error("template parse", "page", page, "err", err)
		http.Error(w, "Template-Fehler", http.StatusInternalServerError)
		return
	}
	// Buffer first: ExecuteTemplate can fail mid-way, and by then a direct
	// write would already have committed a 200 status.
	var buf bytes.Buffer
	if err := t.ExecuteTemplate(&buf, "base", data); err != nil {
		slog.Error("template execute", "page", page, "err", err)
		http.Error(w, "Template-Fehler", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	_, _ = buf.WriteTo(w)
}
|
||||
|
||||
// renderFragment renders a single named block from page (e.g. "feed_items"
// for infinite scroll) without the surrounding "base" layout. Same
// clone-parse-buffer-execute flow as render, but with no nav injection.
func renderFragment(w http.ResponseWriter, page, block string, data any) {
	t, err := tmpls.Clone()
	if err == nil {
		_, err = t.ParseFS(assets, "templates/"+page)
	}
	if err != nil {
		slog.Error("template parse", "page", page, "err", err)
		http.Error(w, "Template-Fehler", http.StatusInternalServerError)
		return
	}
	// Buffer first so a mid-execution failure can still return a clean 500.
	var buf bytes.Buffer
	if err := t.ExecuteTemplate(&buf, block, data); err != nil {
		slog.Error("template execute fragment", "page", page, "block", block, "err", err)
		http.Error(w, "Template-Fehler", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	_, _ = buf.WriteTo(w)
}
|
||||
|
||||
// renderAdmin renders an admin page inside the "admin_base" layout.
// Unlike render, it does not use the shared tmpls set at all:
// Parse layout (admin_base) + specific page fresh each request (no shared state between pages).
func renderAdmin(w http.ResponseWriter, page string, data any) {
	t, err := template.New("").Funcs(funcMap).ParseFS(assets,
		"templates/admin/layout.html",
		"templates/admin/"+page,
	)
	if err != nil {
		slog.Error("template parse admin", "page", page, "err", err)
		http.Error(w, "Template-Fehler", http.StatusInternalServerError)
		return
	}
	// Buffer first so a mid-execution failure can still return a clean 500.
	var buf bytes.Buffer
	if err := t.ExecuteTemplate(&buf, "admin_base", data); err != nil {
		slog.Error("template execute admin", "page", page, "err", err)
		http.Error(w, "Template-Fehler", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	_, _ = buf.WriteTo(w)
}
|
||||
|
||||
// --- Auth ---
|
||||
|
||||
// HandleGetLogin shows an empty login form.
func (ui *WebUI) HandleGetLogin(w http.ResponseWriter, r *http.Request) {
	render(w, r, "login.html", map[string]any{"Error": "", "Username": ""})
}
|
||||
|
||||
// HandlePostLogin authenticates the posted credentials. On success it sets
// the session cookie and redirects to /days; on failure it re-renders the
// login form (specific message for bad credentials, generic otherwise).
func (ui *WebUI) HandlePostLogin(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Ungültige Formulardaten", http.StatusBadRequest)
		return
	}
	username := strings.TrimSpace(r.FormValue("username"))
	password := r.FormValue("password")

	sess, err := ui.authStore.Login(r.Context(), username, password)
	if err != nil {
		msg := "Interner Fehler."
		if errors.Is(err, auth.ErrInvalidCredentials) {
			msg = "Ungültige Zugangsdaten."
		}
		render(w, r, "login.html", map[string]any{"Error": msg, "Username": username})
		return
	}

	http.SetCookie(w, &http.Cookie{
		Name:     sessionCookieName,
		Value:    sess.SessionID,
		Path:     "/",
		HttpOnly: true,
		// NOTE(review): Secure=false is only acceptable for plain-HTTP
		// LAN/NAS deployments; must be true if served over HTTPS.
		Secure:   false,
		SameSite: http.SameSiteLaxMode,
		Expires:  sess.ExpiresAt,
	})
	http.Redirect(w, r, "/days", http.StatusSeeOther)
}
|
||||
|
||||
// HandleLogout revokes the server-side session when a cookie is present
// (revocation errors are deliberately ignored — the cookie is cleared
// either way), expires the cookie via MaxAge:-1 plus an epoch Expires for
// older clients, and redirects to the login page.
func (ui *WebUI) HandleLogout(w http.ResponseWriter, r *http.Request) {
	cookie, err := r.Cookie(sessionCookieName)
	if err == nil {
		ui.authStore.Logout(r.Context(), cookie.Value)
	}
	http.SetCookie(w, &http.Cookie{
		Name:    sessionCookieName,
		Value:   "",
		Path:    "/",
		MaxAge:  -1,
		Expires: time.Unix(0, 0),
	})
	http.Redirect(w, r, "/login", http.StatusSeeOther)
}
|
||||
|
||||
// --- Register ---
|
||||
|
||||
// HandleGetRegister shows an empty registration form.
func (ui *WebUI) HandleGetRegister(w http.ResponseWriter, r *http.Request) {
	render(w, r, "register.html", map[string]any{"Error": "", "Username": ""})
}
|
||||
|
||||
// HandlePostRegister validates the registration form (both fields required,
// password must match its confirmation) and creates the account via
// authStore.Register. Success redirects to /login; any failure re-renders
// the form with a German error message and the entered username preserved.
func (ui *WebUI) HandlePostRegister(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Ungültige Formulardaten", http.StatusBadRequest)
		return
	}
	username := strings.TrimSpace(r.FormValue("username"))
	password := r.FormValue("password")
	confirm := r.FormValue("confirm")

	if username == "" || password == "" {
		render(w, r, "register.html", map[string]any{"Error": "Benutzername und Passwort sind Pflichtfelder.", "Username": username})
		return
	}
	if password != confirm {
		render(w, r, "register.html", map[string]any{"Error": "Passwörter stimmen nicht überein.", "Username": username})
		return
	}

	if err := ui.authStore.Register(r.Context(), username, password); err != nil {
		msg := "Interner Fehler."
		if errors.Is(err, auth.ErrUsernameTaken) {
			msg = "Benutzername bereits vergeben."
		}
		render(w, r, "register.html", map[string]any{"Error": msg, "Username": username})
		return
	}
	http.Redirect(w, r, "/login", http.StatusSeeOther)
}
|
||||
|
||||
// --- Public Feed ---
|
||||
|
||||
func (ui *WebUI) HandleFeed(w http.ResponseWriter, r *http.Request) {
|
||||
entries, err := ui.journalStore.ListPublic(r.Context(), 20, 0)
|
||||
if err != nil {
|
||||
http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
render(w, r, "public.html", map[string]any{
|
||||
"Entries": entries,
|
||||
"Offset": 20,
|
||||
"HasMore": len(entries) == 20,
|
||||
})
|
||||
}
|
||||
|
||||
func (ui *WebUI) HandleFeedFragment(w http.ResponseWriter, r *http.Request) {
|
||||
offset, _ := strconv.Atoi(r.URL.Query().Get("offset"))
|
||||
if offset < 0 {
|
||||
offset = 0
|
||||
}
|
||||
entries, err := ui.journalStore.ListPublic(r.Context(), 20, offset)
|
||||
if err != nil {
|
||||
http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
renderFragment(w, "public.html", "feed_items", map[string]any{
|
||||
"Entries": entries,
|
||||
"Offset": offset + 20,
|
||||
"HasMore": len(entries) == 20,
|
||||
})
|
||||
}
|
||||
|
||||
// --- Days ---
|
||||
|
||||
func (ui *WebUI) HandleDaysRedirect(w http.ResponseWriter, r *http.Request) {
|
||||
date := strings.TrimSpace(r.URL.Query().Get("date"))
|
||||
if date == "" {
|
||||
http.Redirect(w, r, "/days", http.StatusSeeOther)
|
||||
return
|
||||
}
|
||||
if _, err := time.Parse("2006-01-02", date); err != nil {
|
||||
http.Redirect(w, r, "/days", http.StatusSeeOther)
|
||||
return
|
||||
}
|
||||
http.Redirect(w, r, "/days/"+date, http.StatusSeeOther)
|
||||
}
|
||||
|
||||
// HandleDaysList shows the per-day trackpoint summary for the current user.
// The query window is a fixed 20 years back up to tomorrow (UTC) —
// effectively "everything", but with concrete bounds for the store query.
func (ui *WebUI) HandleDaysList(w http.ResponseWriter, r *http.Request) {
	user := userFromContext(r.Context())
	now := time.Now().UTC()
	from := now.AddDate(-20, 0, 0).Format("2006-01-02")
	// "to" is tomorrow so today's points are always inside the half-open range.
	to := now.AddDate(0, 0, 1).Format("2006-01-02")
	days, err := ui.tpStore.ListDays(r.Context(), user.UserID, from, to)
	if err != nil {
		http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
		return
	}
	render(w, r, "days.html", map[string]any{"Days": days})
}
|
||||
|
||||
// HandleDayDetail renders the single-day page: the user's trackpoints,
// detected stops, and journal entries for the date in the URL path.
// The date string is passed through to the stores unparsed; format
// validation is assumed to happen there — TODO confirm.
func (ui *WebUI) HandleDayDetail(w http.ResponseWriter, r *http.Request) {
	userID := userIDFromContext(r.Context())

	date := chi.URLParam(r, "date")
	if date == "" {
		http.Error(w, "Datum fehlt", http.StatusBadRequest)
		return
	}

	points, err := ui.tpStore.ListByDate(r.Context(), userID, date)
	if err != nil {
		http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
		return
	}
	stops, err := ui.stopStore.ListByDate(r.Context(), userID, date)
	if err != nil {
		http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
		return
	}
	entries, err := ui.journalStore.ListByDate(r.Context(), userID, date)
	if err != nil {
		http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
		return
	}

	render(w, r, "day.html", map[string]any{
		"Date":    date,
		"Points":  points,
		"Stops":   stops,
		"Entries": entries,
	})
}
|
||||
|
||||
// --- Admin ---
|
||||
|
||||
func (ui *WebUI) HandleAdminEntries(w http.ResponseWriter, r *http.Request) {
|
||||
user := userFromContext(r.Context())
|
||||
entries, err := ui.journalStore.ListByUser(r.Context(), user.UserID)
|
||||
if err != nil {
|
||||
http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
renderAdmin(w, "entries.html", map[string]any{"Entries": entries, "User": user})
|
||||
}
|
||||
|
||||
func (ui *WebUI) HandleAdminUsers(w http.ResponseWriter, r *http.Request) {
|
||||
user := userFromContext(r.Context())
|
||||
users, err := ui.userStore.ListUsers(r.Context())
|
||||
if err != nil {
|
||||
http.Error(w, "Fehler beim Laden", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
renderAdmin(w, "users.html", map[string]any{"Users": users, "User": user, "Error": ""})
|
||||
}
|
||||
|
||||
func (ui *WebUI) HandleAdminCreateUser(w http.ResponseWriter, r *http.Request) {
|
||||
if err := r.ParseForm(); err != nil {
|
||||
http.Error(w, "Ungültige Formulardaten", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
username := strings.TrimSpace(r.FormValue("username"))
|
||||
password := r.FormValue("password")
|
||||
|
||||
user := userFromContext(r.Context())
|
||||
|
||||
if username == "" || password == "" {
|
||||
users, _ := ui.userStore.ListUsers(r.Context())
|
||||
renderAdmin(w, "users.html", map[string]any{"Users": users, "User": user, "Error": "Benutzername und Passwort erforderlich."})
|
||||
return
|
||||
}
|
||||
|
||||
if err := ui.authStore.Register(r.Context(), username, password); err != nil {
|
||||
msg := "Interner Fehler."
|
||||
if errors.Is(err, auth.ErrUsernameTaken) {
|
||||
msg = "Benutzername bereits vergeben."
|
||||
}
|
||||
users, _ := ui.userStore.ListUsers(r.Context())
|
||||
renderAdmin(w, "users.html", map[string]any{"Users": users, "User": user, "Error": msg})
|
||||
return
|
||||
}
|
||||
http.Redirect(w, r, "/admin/users", http.StatusSeeOther)
|
||||
}
|
||||
|
||||
func (ui *WebUI) HandleAdminDeleteUser(w http.ResponseWriter, r *http.Request) {
|
||||
targetID := chi.URLParam(r, "id")
|
||||
currentUser := userFromContext(r.Context())
|
||||
if targetID == currentUser.UserID {
|
||||
http.Error(w, "Eigenen Account nicht löschbar", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if err := ui.userStore.DeleteUser(r.Context(), targetID); err != nil {
|
||||
http.Error(w, "Fehler beim Löschen", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Redirect(w, r, "/admin/users", http.StatusSeeOther)
|
||||
}
|
||||
172
backend/internal/auth/auth.go
Normal file
172
backend/internal/auth/auth.go
Normal file
@@ -0,0 +1,172 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/rand"
|
||||
"crypto/subtle"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
"golang.org/x/crypto/argon2"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
const sessionDuration = 24 * time.Hour
|
||||
|
||||
var ErrInvalidCredentials = errors.New("invalid username or password")
|
||||
var ErrSessionNotFound = errors.New("session not found or expired")
|
||||
var ErrUsernameTaken = errors.New("username already taken")
|
||||
|
||||
// Store provides credential and session persistence backed by Postgres.
type Store struct {
	pool *pgxpool.Pool
}

// NewStore wraps an existing pgx connection pool; the Store takes no
// ownership of the pool's lifecycle.
func NewStore(pool *pgxpool.Pool) *Store {
	return &Store{pool: pool}
}
|
||||
|
||||
// HashPassword returns an argon2id hash of the password.
|
||||
func HashPassword(password string) (string, error) {
|
||||
salt := make([]byte, 16)
|
||||
if _, err := rand.Read(salt); err != nil {
|
||||
return "", fmt.Errorf("generate salt: %w", err)
|
||||
}
|
||||
hash := argon2.IDKey([]byte(password), salt, 1, 64*1024, 4, 32)
|
||||
return fmt.Sprintf("$argon2id$%x$%x", salt, hash), nil
|
||||
}
|
||||
|
||||
// VerifyPassword checks password against stored hash.
|
||||
// Format: $argon2id$<saltHex>$<hashHex>
|
||||
func VerifyPassword(password, stored string) bool {
|
||||
parts := strings.Split(stored, "$")
|
||||
// ["", "argon2id", "<saltHex>", "<hashHex>"]
|
||||
if len(parts) != 4 || parts[1] != "argon2id" {
|
||||
return false
|
||||
}
|
||||
salt, err := hex.DecodeString(parts[2])
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
expected, err := hex.DecodeString(parts[3])
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
hash := argon2.IDKey([]byte(password), salt, 1, 64*1024, 4, 32)
|
||||
return subtle.ConstantTimeCompare(hash, expected) == 1
|
||||
}
|
||||
|
||||
// Login verifies credentials and creates a session.
//
// It looks the user up by username, checks the password against the stored
// argon2id hash, and on success inserts a session row valid for
// sessionDuration. An unknown username and a wrong password both map to
// ErrInvalidCredentials so callers cannot distinguish the two cases.
func (s *Store) Login(ctx context.Context, username, password string) (domain.Session, error) {
	var user domain.User
	err := s.pool.QueryRow(ctx,
		`SELECT user_id, username, password_hash FROM users WHERE username = $1`,
		username,
	).Scan(&user.UserID, &user.Username, &user.PasswordHash)
	if err != nil {
		if errors.Is(err, pgx.ErrNoRows) {
			// Unknown username: report the same error as a bad password.
			return domain.Session{}, ErrInvalidCredentials
		}
		return domain.Session{}, err
	}

	if !VerifyPassword(password, user.PasswordHash) {
		return domain.Session{}, ErrInvalidCredentials
	}

	// 32 bytes of CSPRNG output, hex-encoded.
	sessionID, err := newSessionID()
	if err != nil {
		return domain.Session{}, fmt.Errorf("create session: %w", err)
	}
	now := time.Now().UTC()
	sess := domain.Session{
		SessionID: sessionID,
		UserID:    user.UserID,
		CreatedAt: now,
		ExpiresAt: now.Add(sessionDuration),
	}

	_, err = s.pool.Exec(ctx,
		`INSERT INTO sessions (session_id, user_id, created_at, expires_at)
		 VALUES ($1, $2, $3, $4)`,
		sess.SessionID, sess.UserID, sess.CreatedAt, sess.ExpiresAt,
	)
	if err != nil {
		return domain.Session{}, err
	}
	return sess, nil
}
|
||||
|
||||
// GetSession validates a session and returns user_id.
|
||||
func (s *Store) GetSession(ctx context.Context, sessionID string) (domain.Session, error) {
|
||||
var sess domain.Session
|
||||
err := s.pool.QueryRow(ctx,
|
||||
`SELECT session_id, user_id, created_at, expires_at
|
||||
FROM sessions
|
||||
WHERE session_id = $1 AND expires_at > NOW()`,
|
||||
sessionID,
|
||||
).Scan(&sess.SessionID, &sess.UserID, &sess.CreatedAt, &sess.ExpiresAt)
|
||||
if err != nil {
|
||||
if errors.Is(err, pgx.ErrNoRows) {
|
||||
return domain.Session{}, ErrSessionNotFound
|
||||
}
|
||||
return domain.Session{}, err
|
||||
}
|
||||
return sess, nil
|
||||
}
|
||||
|
||||
// Logout deletes a session.
|
||||
func (s *Store) Logout(ctx context.Context, sessionID string) error {
|
||||
_, err := s.pool.Exec(ctx, `DELETE FROM sessions WHERE session_id = $1`, sessionID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("delete session: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Register creates a new user account. Returns ErrUsernameTaken if the username is already in use.
func (s *Store) Register(ctx context.Context, username, password string) error {
	hash, err := HashPassword(password)
	if err != nil {
		return fmt.Errorf("hash password: %w", err)
	}
	_, err = s.pool.Exec(ctx,
		`INSERT INTO users (username, password_hash) VALUES ($1, $2)`,
		username, hash,
	)
	// NOTE(review): detecting the unique-constraint violation by substring
	// match on the error text is fragile (locale/driver dependent). Prefer
	// errors.As with *pgconn.PgError and SQLSTATE 23505 if the pgconn
	// package can be imported.
	if err != nil && strings.Contains(err.Error(), "unique") {
		return ErrUsernameTaken
	}
	return err
}
|
||||
|
||||
// GetUserBySession returns the full user (including is_admin) for a session.
|
||||
func (s *Store) GetUserBySession(ctx context.Context, sessionID string) (domain.User, error) {
|
||||
var u domain.User
|
||||
err := s.pool.QueryRow(ctx,
|
||||
`SELECT u.user_id, u.username, u.is_admin, u.created_at
|
||||
FROM sessions s JOIN users u ON s.user_id = u.user_id
|
||||
WHERE s.session_id = $1 AND s.expires_at > NOW()`,
|
||||
sessionID,
|
||||
).Scan(&u.UserID, &u.Username, &u.IsAdmin, &u.CreatedAt)
|
||||
if err != nil {
|
||||
if errors.Is(err, pgx.ErrNoRows) {
|
||||
return domain.User{}, ErrSessionNotFound
|
||||
}
|
||||
return domain.User{}, err
|
||||
}
|
||||
return u, nil
|
||||
}
|
||||
|
||||
func newSessionID() (string, error) {
|
||||
b := make([]byte, 32)
|
||||
if _, err := rand.Read(b); err != nil {
|
||||
return "", fmt.Errorf("generate session id: %w", err)
|
||||
}
|
||||
return hex.EncodeToString(b), nil
|
||||
}
|
||||
65
backend/internal/auth/auth_test.go
Normal file
65
backend/internal/auth/auth_test.go
Normal file
@@ -0,0 +1,65 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestHashPassword_ProducesVerifiableHash checks that HashPassword yields a
// non-empty encoded hash that round-trips through VerifyPassword.
func TestHashPassword_ProducesVerifiableHash(t *testing.T) {
	hash, err := HashPassword("secret")
	if err != nil {
		t.Fatalf("HashPassword returned error: %v", err)
	}
	if hash == "" {
		t.Fatal("HashPassword returned empty string")
	}
	if !VerifyPassword("secret", hash) {
		t.Error("VerifyPassword returned false for correct password")
	}
}
|
||||
|
||||
// TestHashPassword_TwoCallsProduceDifferentHashes ensures the per-call
// random salt makes repeated hashes of the same password distinct.
func TestHashPassword_TwoCallsProduceDifferentHashes(t *testing.T) {
	h1, err1 := HashPassword("secret")
	h2, err2 := HashPassword("secret")
	if err1 != nil || err2 != nil {
		t.Fatalf("HashPassword error: %v / %v", err1, err2)
	}
	// Different salts → different hashes
	if h1 == h2 {
		t.Error("expected distinct hashes for same password (due to random salt), got identical")
	}
}
|
||||
|
||||
// TestVerifyPassword_WrongPassword ensures a wrong password is rejected.
func TestVerifyPassword_WrongPassword(t *testing.T) {
	hash, err := HashPassword("correct")
	if err != nil {
		t.Fatalf("HashPassword error: %v", err)
	}
	if VerifyPassword("wrong", hash) {
		t.Error("VerifyPassword returned true for wrong password")
	}
}
|
||||
|
||||
// TestVerifyPassword_EmptyPassword ensures the empty string does not verify
// against a hash of a non-empty password.
func TestVerifyPassword_EmptyPassword(t *testing.T) {
	hash, err := HashPassword("notempty")
	if err != nil {
		t.Fatalf("HashPassword error: %v", err)
	}
	if VerifyPassword("", hash) {
		t.Error("VerifyPassword returned true for empty password against non-empty hash")
	}
}
|
||||
|
||||
// TestVerifyPassword_MalformedHash ensures every malformed stored-hash shape
// (wrong field count, wrong algorithm tag, non-hex salt/digest) is rejected
// rather than causing a panic or a false positive.
func TestVerifyPassword_MalformedHash(t *testing.T) {
	cases := []string{
		"",
		"notahash",
		"$wrongalgo$aabb$ccdd",
		"$argon2id$nothex$ccdd",
		"$argon2id$aabb$nothex",
	}
	for _, h := range cases {
		if VerifyPassword("secret", h) {
			t.Errorf("VerifyPassword should return false for malformed hash %q", h)
		}
	}
}
|
||||
41
backend/internal/db/db.go
Normal file
41
backend/internal/db/db.go
Normal file
@@ -0,0 +1,41 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
//go:embed schema.sql
|
||||
var schema string
|
||||
|
||||
func NewPool(ctx context.Context, dsn string) (*pgxpool.Pool, error) {
|
||||
cfg, err := pgxpool.ParseConfig(dsn)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("parse dsn: %w", err)
|
||||
}
|
||||
cfg.MaxConns = 25
|
||||
cfg.MinConns = 2
|
||||
cfg.MaxConnLifetime = 15 * time.Minute
|
||||
cfg.MaxConnIdleTime = 5 * time.Minute
|
||||
|
||||
pool, err := pgxpool.NewWithConfig(ctx, cfg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("create pool: %w", err)
|
||||
}
|
||||
if err := pool.Ping(ctx); err != nil {
|
||||
return nil, fmt.Errorf("ping db: %w", err)
|
||||
}
|
||||
return pool, nil
|
||||
}
|
||||
|
||||
// InitSchema applies the embedded schema.sql (idempotent via IF NOT EXISTS).
|
||||
func InitSchema(ctx context.Context, pool *pgxpool.Pool) error {
|
||||
if _, err := pool.Exec(ctx, schema); err != nil {
|
||||
return fmt.Errorf("init schema: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
25
backend/internal/db/interfaces.go
Normal file
25
backend/internal/db/interfaces.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// TrackpointStorer is the interface consumed by HTTP handlers.
|
||||
// The concrete TrackpointStore satisfies it.
|
||||
type TrackpointStorer interface {
|
||||
UpsertBatch(ctx context.Context, userID string, points []domain.Trackpoint) (accepted []string, rejected []RejectedItem, err error)
|
||||
ListByDate(ctx context.Context, userID, date string) ([]domain.Trackpoint, error)
|
||||
ListDays(ctx context.Context, userID, from, to string) ([]domain.DaySummary, error)
|
||||
}
|
||||
|
||||
// StopStorer is the interface consumed by HTTP handlers.
|
||||
type StopStorer interface {
|
||||
ListByDate(ctx context.Context, userID, date string) ([]domain.Stop, error)
|
||||
}
|
||||
|
||||
// SuggestionStorer is the interface consumed by HTTP handlers.
|
||||
type SuggestionStorer interface {
|
||||
ListByDate(ctx context.Context, userID, date string) ([]domain.Suggestion, error)
|
||||
}
|
||||
188
backend/internal/db/journal.go
Normal file
188
backend/internal/db/journal.go
Normal file
@@ -0,0 +1,188 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// JournalStore persists journal entries and their image attachments.
type JournalStore struct {
	pool *pgxpool.Pool
}

// NewJournalStore wraps an existing pgx connection pool; it takes no
// ownership of the pool's lifecycle.
func NewJournalStore(pool *pgxpool.Pool) *JournalStore {
	return &JournalStore{pool: pool}
}
|
||||
|
||||
// InsertEntry creates a new journal entry and returns it with the generated entry_id.
|
||||
func (s *JournalStore) InsertEntry(ctx context.Context, e domain.JournalEntry) (domain.JournalEntry, error) {
|
||||
if e.Visibility == "" {
|
||||
e.Visibility = "private"
|
||||
}
|
||||
if e.Hashtags == nil {
|
||||
e.Hashtags = []string{}
|
||||
}
|
||||
err := s.pool.QueryRow(ctx,
|
||||
`INSERT INTO journal_entries (user_id, entry_date, entry_time, title, description, lat, lon, visibility, hashtags)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||
RETURNING entry_id, created_at`,
|
||||
e.UserID, e.EntryDate, e.EntryTime, e.Title, e.Description, e.Lat, e.Lon, e.Visibility, e.Hashtags,
|
||||
).Scan(&e.EntryID, &e.CreatedAt)
|
||||
return e, err
|
||||
}
|
||||
|
||||
// InsertImage attaches an image record to an entry.
|
||||
func (s *JournalStore) InsertImage(ctx context.Context, img domain.JournalImage) (domain.JournalImage, error) {
|
||||
err := s.pool.QueryRow(ctx,
|
||||
`INSERT INTO journal_images (entry_id, filename, original_name, mime_type, size_bytes)
|
||||
VALUES ($1, $2, $3, $4, $5)
|
||||
RETURNING image_id, created_at`,
|
||||
img.EntryID, img.Filename, img.OriginalName, img.MimeType, img.SizeBytes,
|
||||
).Scan(&img.ImageID, &img.CreatedAt)
|
||||
return img, err
|
||||
}
|
||||
|
||||
// GetEntry returns a single entry by ID, verifying ownership.
//
// The user_id predicate doubles as an authorization check: requesting
// another user's entry yields pgx.ErrNoRows instead of the row. The ::text
// casts suggest EntryDate/EntryTime are string fields — confirm in domain.
func (s *JournalStore) GetEntry(ctx context.Context, entryID, userID string) (domain.JournalEntry, error) {
	var e domain.JournalEntry
	err := s.pool.QueryRow(ctx,
		`SELECT entry_id, user_id, entry_date::text, entry_time::text, title, description, lat, lon, visibility, hashtags, created_at
		 FROM journal_entries WHERE entry_id = $1 AND user_id = $2`,
		entryID, userID,
	).Scan(&e.EntryID, &e.UserID, &e.EntryDate, &e.EntryTime,
		&e.Title, &e.Description, &e.Lat, &e.Lon, &e.Visibility, &e.Hashtags, &e.CreatedAt)
	return e, err
}
|
||||
|
||||
// UpdateEntry updates mutable fields of an existing entry.
|
||||
func (s *JournalStore) UpdateEntry(ctx context.Context, e domain.JournalEntry) error {
|
||||
if e.Hashtags == nil {
|
||||
e.Hashtags = []string{}
|
||||
}
|
||||
_, err := s.pool.Exec(ctx,
|
||||
`UPDATE journal_entries
|
||||
SET entry_time = $1, title = $2, description = $3, lat = $4, lon = $5, visibility = $6, hashtags = $7
|
||||
WHERE entry_id = $8 AND user_id = $9`,
|
||||
e.EntryTime, e.Title, e.Description, e.Lat, e.Lon, e.Visibility, e.Hashtags,
|
||||
e.EntryID, e.UserID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
// ListByDate returns all journal entries for a given date (YYYY-MM-DD), including their images.
|
||||
func (s *JournalStore) ListByDate(ctx context.Context, userID, date string) ([]domain.JournalEntry, error) {
|
||||
rows, err := s.pool.Query(ctx,
|
||||
`SELECT entry_id, user_id, entry_date::text, entry_time::text, title, description, lat, lon, visibility, hashtags, created_at
|
||||
FROM journal_entries
|
||||
WHERE user_id = $1 AND entry_date = $2
|
||||
ORDER BY entry_time`,
|
||||
userID, date,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
entries, err := collectEntries(rows)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.attachImages(ctx, entries)
|
||||
}
|
||||
|
||||
// ListPublic returns public journal entries ordered by created_at DESC, for infinite scroll.
|
||||
func (s *JournalStore) ListPublic(ctx context.Context, limit, offset int) ([]domain.JournalEntry, error) {
|
||||
rows, err := s.pool.Query(ctx,
|
||||
`SELECT entry_id, user_id, entry_date::text, entry_time::text, title, description, lat, lon, visibility, hashtags, created_at
|
||||
FROM journal_entries
|
||||
WHERE visibility = 'public'
|
||||
ORDER BY created_at DESC
|
||||
LIMIT $1 OFFSET $2`,
|
||||
limit, offset,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
entries, err := collectEntries(rows)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.attachImages(ctx, entries)
|
||||
}
|
||||
|
||||
// ListByUser returns all entries for a user, ordered by entry_date DESC, entry_time DESC.
|
||||
func (s *JournalStore) ListByUser(ctx context.Context, userID string) ([]domain.JournalEntry, error) {
|
||||
rows, err := s.pool.Query(ctx,
|
||||
`SELECT entry_id, user_id, entry_date::text, entry_time::text, title, description, lat, lon, visibility, hashtags, created_at
|
||||
FROM journal_entries
|
||||
WHERE user_id = $1
|
||||
ORDER BY entry_date DESC, entry_time DESC`,
|
||||
userID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
entries, err := collectEntries(rows)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.attachImages(ctx, entries)
|
||||
}
|
||||
|
||||
func collectEntries(rows pgx.Rows) ([]domain.JournalEntry, error) {
|
||||
var entries []domain.JournalEntry
|
||||
for rows.Next() {
|
||||
var e domain.JournalEntry
|
||||
if err := rows.Scan(
|
||||
&e.EntryID, &e.UserID, &e.EntryDate, &e.EntryTime,
|
||||
&e.Title, &e.Description, &e.Lat, &e.Lon, &e.Visibility, &e.Hashtags, &e.CreatedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
entries = append(entries, e)
|
||||
}
|
||||
return entries, rows.Err()
|
||||
}
|
||||
|
||||
// attachImages loads images for the given entries in a single query and populates .Images.
|
||||
func (s *JournalStore) attachImages(ctx context.Context, entries []domain.JournalEntry) ([]domain.JournalEntry, error) {
|
||||
if len(entries) == 0 {
|
||||
return entries, nil
|
||||
}
|
||||
entryIDs := make([]string, len(entries))
|
||||
for i, e := range entries {
|
||||
entryIDs[i] = e.EntryID
|
||||
}
|
||||
imgRows, err := s.pool.Query(ctx,
|
||||
`SELECT image_id, entry_id, filename, original_name, mime_type, size_bytes, created_at
|
||||
FROM journal_images WHERE entry_id = ANY($1) ORDER BY created_at`,
|
||||
entryIDs,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer imgRows.Close()
|
||||
|
||||
imgMap := make(map[string][]domain.JournalImage)
|
||||
for imgRows.Next() {
|
||||
var img domain.JournalImage
|
||||
if err := imgRows.Scan(
|
||||
&img.ImageID, &img.EntryID, &img.Filename, &img.OriginalName,
|
||||
&img.MimeType, &img.SizeBytes, &img.CreatedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
imgMap[img.EntryID] = append(imgMap[img.EntryID], img)
|
||||
}
|
||||
if err := imgRows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for i, e := range entries {
|
||||
entries[i].Images = imgMap[e.EntryID]
|
||||
}
|
||||
return entries, nil
|
||||
}
|
||||
98
backend/internal/db/schema.sql
Normal file
98
backend/internal/db/schema.sql
Normal file
@@ -0,0 +1,98 @@
|
||||
-- Pamietnik database schema
-- Applied automatically at server startup via CREATE TABLE IF NOT EXISTS.
-- NOTE(review): gen_random_uuid() requires PostgreSQL 13+ (or the pgcrypto
-- extension on older versions) — confirm the deployment target.

-- Accounts. password_hash holds "$argon2id$<saltHex>$<hashHex>".
CREATE TABLE IF NOT EXISTS users (
    user_id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text,
    username TEXT NOT NULL UNIQUE,
    password_hash TEXT NOT NULL,
    is_admin BOOLEAN NOT NULL DEFAULT false,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Login sessions; queries treat rows past expires_at as absent.
CREATE TABLE IF NOT EXISTS sessions (
    session_id TEXT PRIMARY KEY,
    user_id TEXT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    expires_at TIMESTAMPTZ NOT NULL
);
CREATE INDEX IF NOT EXISTS sessions_expires_at_idx ON sessions(expires_at);

-- Maps trackpoint-uploading devices to their owning user.
CREATE TABLE IF NOT EXISTS devices (
    device_id TEXT PRIMARY KEY,
    user_id TEXT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Raw location points. The (device_id, event_id) unique constraint makes
-- batch uploads idempotent. device_id carries no FK, so points may be
-- inserted before the device row exists.
CREATE TABLE IF NOT EXISTS trackpoints (
    id BIGSERIAL PRIMARY KEY,
    event_id TEXT NOT NULL,
    device_id TEXT NOT NULL,
    trip_id TEXT NOT NULL DEFAULT '',
    ts TIMESTAMPTZ NOT NULL,
    lat DOUBLE PRECISION NOT NULL,
    lon DOUBLE PRECISION NOT NULL,
    source TEXT NOT NULL DEFAULT 'gps',
    note TEXT NOT NULL DEFAULT '',
    accuracy_m DOUBLE PRECISION,
    speed_mps DOUBLE PRECISION,
    bearing_deg DOUBLE PRECISION,
    altitude_m DOUBLE PRECISION,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    CONSTRAINT trackpoints_device_event_uniq UNIQUE (device_id, event_id)
);
CREATE INDEX IF NOT EXISTS trackpoints_device_ts_idx ON trackpoints(device_id, ts);
CREATE INDEX IF NOT EXISTS trackpoints_ts_idx ON trackpoints(ts);

-- Stationary periods; presumably derived from trackpoints — see the stop
-- detection code. place_label/place_details are optional enrichment.
CREATE TABLE IF NOT EXISTS stops (
    stop_id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text,
    device_id TEXT NOT NULL,
    trip_id TEXT NOT NULL DEFAULT '',
    start_ts TIMESTAMPTZ NOT NULL,
    end_ts TIMESTAMPTZ NOT NULL,
    center_lat DOUBLE PRECISION NOT NULL,
    center_lon DOUBLE PRECISION NOT NULL,
    duration_s INT NOT NULL,
    place_label TEXT,
    place_details JSONB,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS stops_device_start_ts_idx ON stops(device_id, start_ts);

-- Suggestions generated for a stop; dismissed_at presumably marks
-- user-rejected suggestions — confirm against the handler code.
CREATE TABLE IF NOT EXISTS suggestions (
    suggestion_id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text,
    stop_id TEXT NOT NULL REFERENCES stops(stop_id) ON DELETE CASCADE,
    type TEXT NOT NULL,
    title TEXT NOT NULL DEFAULT '',
    text TEXT NOT NULL DEFAULT '',
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    dismissed_at TIMESTAMPTZ
);
CREATE INDEX IF NOT EXISTS suggestions_stop_id_idx ON suggestions(stop_id);

-- User-written diary entries. visibility is 'private' (default) or 'public'
-- (the only other value the Go code queries for).
CREATE TABLE IF NOT EXISTS journal_entries (
    entry_id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text,
    user_id TEXT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
    entry_date DATE NOT NULL,
    entry_time TIME NOT NULL,
    title TEXT NOT NULL DEFAULT '',
    description TEXT NOT NULL DEFAULT '',
    lat DOUBLE PRECISION,
    lon DOUBLE PRECISION,
    visibility TEXT NOT NULL DEFAULT 'private',
    hashtags TEXT[] NOT NULL DEFAULT '{}',
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS journal_entries_user_date_idx ON journal_entries(user_id, entry_date);
CREATE INDEX IF NOT EXISTS journal_entries_public_idx ON journal_entries(visibility, created_at DESC);

-- Image attachment metadata for journal entries; the image bytes themselves
-- are stored outside the database (referenced via filename).
CREATE TABLE IF NOT EXISTS journal_images (
    image_id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text,
    entry_id TEXT NOT NULL REFERENCES journal_entries(entry_id) ON DELETE CASCADE,
    filename TEXT NOT NULL,
    original_name TEXT NOT NULL DEFAULT '',
    mime_type TEXT NOT NULL DEFAULT '',
    size_bytes BIGINT NOT NULL DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS journal_images_entry_id_idx ON journal_images(entry_id);
|
||||
60
backend/internal/db/stops.go
Normal file
60
backend/internal/db/stops.go
Normal file
@@ -0,0 +1,60 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// StopStore persists detected stationary periods ("stops").
type StopStore struct {
	pool *pgxpool.Pool
}

// NewStopStore wraps an existing pgx connection pool; it takes no ownership
// of the pool's lifecycle.
func NewStopStore(pool *pgxpool.Pool) *StopStore {
	return &StopStore{pool: pool}
}
|
||||
|
||||
func (s *StopStore) ListByDate(ctx context.Context, userID, date string) ([]domain.Stop, error) {
|
||||
rows, err := s.pool.Query(ctx, `
|
||||
SELECT st.stop_id, st.device_id, st.trip_id,
|
||||
st.start_ts, st.end_ts,
|
||||
st.center_lat, st.center_lon, st.duration_s,
|
||||
COALESCE(st.place_label, ''),
|
||||
st.place_details
|
||||
FROM stops st
|
||||
JOIN devices d ON d.device_id = st.device_id
|
||||
WHERE d.user_id = $1
|
||||
AND DATE(st.start_ts AT TIME ZONE 'UTC') = $2::date
|
||||
ORDER BY st.start_ts`,
|
||||
userID, date,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return pgx.CollectRows(rows, func(row pgx.CollectableRow) (domain.Stop, error) {
|
||||
var st domain.Stop
|
||||
err := row.Scan(
|
||||
&st.StopID, &st.DeviceID, &st.TripID,
|
||||
&st.StartTS, &st.EndTS,
|
||||
&st.CenterLat, &st.CenterLon, &st.DurationS,
|
||||
&st.PlaceLabel, &st.PlaceDetails,
|
||||
)
|
||||
return st, err
|
||||
})
|
||||
}
|
||||
|
||||
func (s *StopStore) Insert(ctx context.Context, st domain.Stop) error {
|
||||
_, err := s.pool.Exec(ctx, `
|
||||
INSERT INTO stops (stop_id, device_id, trip_id, start_ts, end_ts,
|
||||
center_lat, center_lon, duration_s, place_label, place_details)
|
||||
VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10)
|
||||
ON CONFLICT (stop_id) DO NOTHING`,
|
||||
st.StopID, st.DeviceID, st.TripID, st.StartTS, st.EndTS,
|
||||
st.CenterLat, st.CenterLon, st.DurationS, st.PlaceLabel, st.PlaceDetails,
|
||||
)
|
||||
return err
|
||||
}
|
||||
54
backend/internal/db/suggestions.go
Normal file
54
backend/internal/db/suggestions.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// SuggestionStore persists journal suggestions attached to stops.
type SuggestionStore struct {
	pool *pgxpool.Pool
}

// NewSuggestionStore wraps an existing pgx connection pool; it takes no
// ownership of the pool's lifecycle.
func NewSuggestionStore(pool *pgxpool.Pool) *SuggestionStore {
	return &SuggestionStore{pool: pool}
}
|
||||
|
||||
func (s *SuggestionStore) ListByDate(ctx context.Context, userID, date string) ([]domain.Suggestion, error) {
|
||||
rows, err := s.pool.Query(ctx, `
|
||||
SELECT sg.suggestion_id, sg.stop_id, sg.type, sg.title, sg.text,
|
||||
sg.created_at, sg.dismissed_at
|
||||
FROM suggestions sg
|
||||
JOIN stops st ON st.stop_id = sg.stop_id
|
||||
JOIN devices d ON d.device_id = st.device_id
|
||||
WHERE d.user_id = $1
|
||||
AND DATE(st.start_ts AT TIME ZONE 'UTC') = $2::date
|
||||
ORDER BY sg.created_at`,
|
||||
userID, date,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return pgx.CollectRows(rows, func(row pgx.CollectableRow) (domain.Suggestion, error) {
|
||||
var sg domain.Suggestion
|
||||
err := row.Scan(
|
||||
&sg.SuggestionID, &sg.StopID, &sg.Type, &sg.Title, &sg.Text,
|
||||
&sg.CreatedAt, &sg.DismissedAt,
|
||||
)
|
||||
return sg, err
|
||||
})
|
||||
}
|
||||
|
||||
func (s *SuggestionStore) Insert(ctx context.Context, sg domain.Suggestion) error {
|
||||
_, err := s.pool.Exec(ctx, `
|
||||
INSERT INTO suggestions (suggestion_id, stop_id, type, title, text, created_at)
|
||||
VALUES ($1,$2,$3,$4,$5,$6)
|
||||
ON CONFLICT (suggestion_id) DO NOTHING`,
|
||||
sg.SuggestionID, sg.StopID, sg.Type, sg.Title, sg.Text, sg.CreatedAt,
|
||||
)
|
||||
return err
|
||||
}
|
||||
163
backend/internal/db/trackpoints.go
Normal file
163
backend/internal/db/trackpoints.go
Normal file
@@ -0,0 +1,163 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// TrackpointStore persists raw location points uploaded by devices.
type TrackpointStore struct {
	pool *pgxpool.Pool
}

// NewTrackpointStore wraps an existing pgx connection pool; it takes no
// ownership of the pool's lifecycle.
func NewTrackpointStore(pool *pgxpool.Pool) *TrackpointStore {
	return &TrackpointStore{pool: pool}
}
|
||||
|
||||
// UpsertBatch inserts trackpoints, ignoring duplicates (idempotency via device_id + event_id).
// Returns accepted event_ids and rejected items with reason.
//
// Processing is best-effort: invalid points are collected into rejected
// (code VALIDATION_ERROR) while the rest proceed, and a per-row insert
// failure rejects only that point (code DB_ERROR). The returned error is
// non-nil only when the device-registration batch itself fails.
func (s *TrackpointStore) UpsertBatch(ctx context.Context, userID string, points []domain.Trackpoint) (accepted []string, rejected []RejectedItem, err error) {
	// First pass: validate all points
	var valid []domain.Trackpoint
	for _, p := range points {
		if vErr := validateTrackpoint(p); vErr != nil {
			rejected = append(rejected, RejectedItem{
				EventID: p.EventID,
				Code:    "VALIDATION_ERROR",
				Message: vErr.Error(),
			})
			continue
		}
		valid = append(valid, p)
	}

	if len(valid) == 0 {
		return accepted, rejected, nil
	}

	// Ensure devices in a single batch (deduplicated)
	// NOTE(review): when userID is empty, no device rows are created; the
	// trackpoint inserts below can still succeed because trackpoints has no
	// FK on device_id — confirm this fallthrough is intended.
	if userID != "" {
		seen := make(map[string]bool)
		batch := &pgx.Batch{}
		for _, p := range valid {
			if !seen[p.DeviceID] {
				seen[p.DeviceID] = true
				batch.Queue(
					`INSERT INTO devices (device_id, user_id) VALUES ($1, $2) ON CONFLICT (device_id) DO NOTHING`,
					p.DeviceID, userID,
				)
			}
		}
		br := s.pool.SendBatch(ctx, batch)
		if closeErr := br.Close(); closeErr != nil {
			return accepted, rejected, fmt.Errorf("ensure devices: %w", closeErr)
		}
	}

	// Insert trackpoints one by one; ON CONFLICT makes retried uploads
	// idempotent (duplicates count as accepted).
	for _, p := range valid {
		_, err := s.pool.Exec(ctx, `
			INSERT INTO trackpoints (
				event_id, device_id, trip_id, ts,
				lat, lon, source, note,
				accuracy_m, speed_mps, bearing_deg, altitude_m
			) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)
			ON CONFLICT (device_id, event_id) DO NOTHING`,
			p.EventID, p.DeviceID, p.TripID, p.Timestamp,
			p.Lat, p.Lon, p.Source, p.Note,
			p.AccuracyM, p.SpeedMps, p.BearingDeg, p.AltitudeM,
		)
		if err != nil {
			// DB error detail is deliberately not leaked to the caller.
			rejected = append(rejected, RejectedItem{
				EventID: p.EventID,
				Code:    "DB_ERROR",
				Message: "database error",
			})
			continue
		}
		accepted = append(accepted, p.EventID)
	}
	return accepted, rejected, nil
}
|
||||
|
||||
// RejectedItem describes one trackpoint that could not be stored.
// Code is "VALIDATION_ERROR" or "DB_ERROR" (see UpsertBatch).
type RejectedItem struct {
	EventID string `json:"event_id"`
	Code    string `json:"code"`
	Message string `json:"message"`
}
|
||||
|
||||
func validateTrackpoint(p domain.Trackpoint) error {
|
||||
if p.EventID == "" {
|
||||
return errors.New("event_id is required")
|
||||
}
|
||||
if p.DeviceID == "" {
|
||||
return errors.New("device_id is required")
|
||||
}
|
||||
if p.Lat < -90 || p.Lat > 90 {
|
||||
return errors.New("lat out of range")
|
||||
}
|
||||
if p.Lon < -180 || p.Lon > 180 {
|
||||
return errors.New("lon out of range")
|
||||
}
|
||||
if p.Source != "" && p.Source != "gps" && p.Source != "manual" {
|
||||
return errors.New("source must be 'gps' or 'manual'")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *TrackpointStore) ListByDate(ctx context.Context, userID, date string) ([]domain.Trackpoint, error) {
|
||||
rows, err := s.pool.Query(ctx, `
|
||||
SELECT tp.event_id, tp.device_id, tp.trip_id, tp.ts,
|
||||
tp.lat, tp.lon, tp.source, tp.note,
|
||||
tp.accuracy_m, tp.speed_mps, tp.bearing_deg, tp.altitude_m
|
||||
FROM trackpoints tp
|
||||
JOIN devices d ON d.device_id = tp.device_id
|
||||
WHERE d.user_id = $1
|
||||
AND DATE(tp.ts AT TIME ZONE 'UTC') = $2::date
|
||||
ORDER BY tp.ts`,
|
||||
userID, date,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return pgx.CollectRows(rows, func(row pgx.CollectableRow) (domain.Trackpoint, error) {
|
||||
var p domain.Trackpoint
|
||||
err := row.Scan(
|
||||
&p.EventID, &p.DeviceID, &p.TripID, &p.Timestamp,
|
||||
&p.Lat, &p.Lon, &p.Source, &p.Note,
|
||||
&p.AccuracyM, &p.SpeedMps, &p.BearingDeg, &p.AltitudeM,
|
||||
)
|
||||
return p, err
|
||||
})
|
||||
}
|
||||
|
||||
func (s *TrackpointStore) ListDays(ctx context.Context, userID, from, to string) ([]domain.DaySummary, error) {
|
||||
rows, err := s.pool.Query(ctx, `
|
||||
SELECT DATE(tp.ts AT TIME ZONE 'UTC')::text AS date,
|
||||
COUNT(*) AS cnt,
|
||||
MIN(tp.ts),
|
||||
MAX(tp.ts)
|
||||
FROM trackpoints tp
|
||||
JOIN devices d ON d.device_id = tp.device_id
|
||||
WHERE d.user_id = $1
|
||||
AND DATE(tp.ts AT TIME ZONE 'UTC') BETWEEN $2::date AND $3::date
|
||||
GROUP BY DATE(tp.ts AT TIME ZONE 'UTC')
|
||||
ORDER BY date`,
|
||||
userID, from, to,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return pgx.CollectRows(rows, func(row pgx.CollectableRow) (domain.DaySummary, error) {
|
||||
var d domain.DaySummary
|
||||
err := row.Scan(&d.Date, &d.Count, &d.FirstTS, &d.LastTS)
|
||||
return d, err
|
||||
})
|
||||
}
|
||||
93
backend/internal/db/trackpoints_test.go
Normal file
93
backend/internal/db/trackpoints_test.go
Normal file
@@ -0,0 +1,93 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
func TestValidateTrackpoint_HappyPath(t *testing.T) {
|
||||
p := domain.Trackpoint{
|
||||
EventID: "evt-1",
|
||||
DeviceID: "dev-1",
|
||||
Lat: 52.5,
|
||||
Lon: 13.4,
|
||||
Source: "gps",
|
||||
}
|
||||
if err := validateTrackpoint(p); err != nil {
|
||||
t.Fatalf("expected no error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateTrackpoint_MissingEventID(t *testing.T) {
|
||||
p := domain.Trackpoint{
|
||||
DeviceID: "dev-1",
|
||||
Lat: 52.5,
|
||||
Lon: 13.4,
|
||||
}
|
||||
err := validateTrackpoint(p)
|
||||
if err == nil {
|
||||
t.Fatal("expected error for missing event_id, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateTrackpoint_MissingDeviceID(t *testing.T) {
|
||||
p := domain.Trackpoint{
|
||||
EventID: "evt-1",
|
||||
Lat: 52.5,
|
||||
Lon: 13.4,
|
||||
}
|
||||
err := validateTrackpoint(p)
|
||||
if err == nil {
|
||||
t.Fatal("expected error for missing device_id, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
// TestValidateTrackpoint_LatOutOfRange checks rejection of out-of-range
// coordinates. Despite the name, the table covers both latitude (|lat| > 90)
// and longitude (|lon| > 180) violations.
func TestValidateTrackpoint_LatOutOfRange(t *testing.T) {
	// Each case violates exactly one bound; the other coordinate is valid (0).
	cases := []struct {
		lat float64
		lon float64
	}{
		{91, 0},
		{-91, 0},
		{0, 181},
		{0, -181},
	}
	for _, c := range cases {
		p := domain.Trackpoint{
			EventID:  "evt-1",
			DeviceID: "dev-1",
			Lat:      c.lat,
			Lon:      c.lon,
		}
		// Errorf (not Fatalf) so all cases are reported in one run.
		if err := validateTrackpoint(p); err == nil {
			t.Errorf("expected error for lat=%v lon=%v, got nil", c.lat, c.lon)
		}
	}
}
|
||||
|
||||
func TestValidateTrackpoint_InvalidSource(t *testing.T) {
|
||||
p := domain.Trackpoint{
|
||||
EventID: "evt-1",
|
||||
DeviceID: "dev-1",
|
||||
Lat: 10,
|
||||
Lon: 10,
|
||||
Source: "satellite",
|
||||
}
|
||||
if err := validateTrackpoint(p); err == nil {
|
||||
t.Fatal("expected error for invalid source, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateTrackpoint_EmptySourceIsAllowed(t *testing.T) {
|
||||
p := domain.Trackpoint{
|
||||
EventID: "evt-1",
|
||||
DeviceID: "dev-1",
|
||||
Lat: 10,
|
||||
Lon: 10,
|
||||
Source: "",
|
||||
}
|
||||
if err := validateTrackpoint(p); err != nil {
|
||||
t.Fatalf("empty source should be allowed, got: %v", err)
|
||||
}
|
||||
}
|
||||
64
backend/internal/db/users.go
Normal file
64
backend/internal/db/users.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
|
||||
"github.com/jacek/pamietnik/backend/internal/domain"
|
||||
)
|
||||
|
||||
// UserStore provides user persistence operations backed by a pgx
// connection pool.
type UserStore struct {
	pool *pgxpool.Pool // shared pool; lifetime is managed by the caller
}
|
||||
|
||||
// NewUserStore wraps the given pool in a UserStore. The store does not
// close the pool; that remains the caller's responsibility.
func NewUserStore(pool *pgxpool.Pool) *UserStore {
	return &UserStore{pool: pool}
}
|
||||
|
||||
// SeedAdminUser creates an admin user if no users exist yet.
|
||||
// Returns (true, nil) if the user was created, (false, nil) if users already exist.
|
||||
func SeedAdminUser(ctx context.Context, pool *pgxpool.Pool, username, passwordHash string) (bool, error) {
|
||||
var count int
|
||||
if err := pool.QueryRow(ctx, `SELECT COUNT(*) FROM users`).Scan(&count); err != nil {
|
||||
return false, fmt.Errorf("count users: %w", err)
|
||||
}
|
||||
if count > 0 {
|
||||
return false, nil
|
||||
}
|
||||
_, err := pool.Exec(ctx,
|
||||
`INSERT INTO users (username, password_hash, is_admin) VALUES ($1, $2, true)`,
|
||||
username, passwordHash,
|
||||
)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("insert admin: %w", err)
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// ListUsers returns all users ordered by created_at.
|
||||
func (s *UserStore) ListUsers(ctx context.Context) ([]domain.User, error) {
|
||||
rows, err := s.pool.Query(ctx,
|
||||
`SELECT user_id, username, is_admin, created_at FROM users ORDER BY created_at`,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var users []domain.User
|
||||
for rows.Next() {
|
||||
var u domain.User
|
||||
if err := rows.Scan(&u.UserID, &u.Username, &u.IsAdmin, &u.CreatedAt); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
users = append(users, u)
|
||||
}
|
||||
return users, rows.Err()
|
||||
}
|
||||
|
||||
// DeleteUser removes a user by ID.
|
||||
func (s *UserStore) DeleteUser(ctx context.Context, userID string) error {
|
||||
_, err := s.pool.Exec(ctx, `DELETE FROM users WHERE user_id = $1`, userID)
|
||||
return err
|
||||
}
|
||||
88
backend/internal/domain/models.go
Normal file
88
backend/internal/domain/models.go
Normal file
@@ -0,0 +1,88 @@
|
||||
package domain
|
||||
|
||||
import "time"
|
||||
|
||||
// Trackpoint is a single position sample ingested from a device.
// (device_id, event_id) identifies a point idempotently — duplicates are
// dropped via the UNIQUE constraint on ingest.
type Trackpoint struct {
	EventID   string    `json:"event_id"`  // client-generated idempotency key
	DeviceID  string    `json:"device_id"` // owning device
	TripID    string    `json:"trip_id"`
	Timestamp time.Time `json:"timestamp"`
	Lat       float64   `json:"lat"` // degrees; validated to [-90, 90]
	Lon       float64   `json:"lon"` // degrees; validated to [-180, 180]
	Source    string    `json:"source"` // "gps" | "manual"
	Note      string    `json:"note,omitempty"`
	// Optional sensor readings; nil when the client did not report them.
	AccuracyM  *float64 `json:"accuracy_m,omitempty"`
	SpeedMps   *float64 `json:"speed_mps,omitempty"`
	BearingDeg *float64 `json:"bearing_deg,omitempty"`
	AltitudeM  *float64 `json:"altitude_m,omitempty"`
}
|
||||
|
||||
// Stop is a detected stay: a span of time during which a device lingered
// around one location.
type Stop struct {
	StopID    string    `json:"stop_id"`
	DeviceID  string    `json:"device_id"`
	TripID    string    `json:"trip_id"`
	StartTS   time.Time `json:"start_ts"`
	EndTS     time.Time `json:"end_ts"`
	CenterLat float64   `json:"center_lat"`
	CenterLon float64   `json:"center_lon"`
	DurationS int       `json:"duration_s"` // stay duration in seconds
	PlaceLabel string `json:"place_label,omitempty"`
	// Free-form place metadata — presumably a reverse-geocoding payload;
	// TODO(review): confirm against the producer of this field.
	PlaceDetails map[string]any `json:"place_details,omitempty"`
}
|
||||
|
||||
// Suggestion is a journaling prompt derived from a detected stop.
type Suggestion struct {
	SuggestionID string    `json:"suggestion_id"`
	StopID       string    `json:"stop_id"`
	Type         string    `json:"type"` // "highlight" | "name_place" | "add_note"
	Title        string    `json:"title"`
	Text         string    `json:"text"`
	CreatedAt    time.Time `json:"created_at"`
	DismissedAt  *time.Time `json:"dismissed_at,omitempty"` // nil while the suggestion is still active
}
|
||||
|
||||
// DaySummary aggregates one UTC calendar day of trackpoints
// (produced by TrackpointStore.ListDays).
type DaySummary struct {
	Date    string     `json:"date"`  // YYYY-MM-DD (UTC day bucket)
	Count   int        `json:"count"` // number of trackpoints that day
	FirstTS *time.Time `json:"first_ts,omitempty"` // earliest point of the day
	LastTS  *time.Time `json:"last_ts,omitempty"`  // latest point of the day
}
|
||||
|
||||
// JournalEntry is one diary entry, optionally geotagged and illustrated
// with images.
type JournalEntry struct {
	EntryID     string   `json:"entry_id"`
	UserID      string   `json:"user_id"` // author
	EntryDate   string   `json:"entry_date"` // YYYY-MM-DD
	EntryTime   string   `json:"entry_time"` // HH:MM
	Title       string   `json:"title"`
	Description string   `json:"description"`
	Lat         *float64 `json:"lat,omitempty"` // nil when the entry is not geotagged
	Lon         *float64 `json:"lon,omitempty"`
	Visibility  string   `json:"visibility"` // "public" | "private"
	Hashtags    []string `json:"hashtags"`
	CreatedAt   time.Time `json:"created_at"`
	Images      []JournalImage `json:"images,omitempty"` // attached media, if any
}
|
||||
|
||||
// JournalImage is a stored media attachment belonging to a journal entry.
type JournalImage struct {
	ImageID      string    `json:"image_id"`
	EntryID      string    `json:"entry_id"` // owning entry
	Filename     string    `json:"filename"` // name on disk — presumably server-generated; verify against the upload handler
	OriginalName string    `json:"original_name"` // client-supplied name at upload time
	MimeType     string    `json:"mime_type"`
	SizeBytes    int64     `json:"size_bytes"`
	CreatedAt    time.Time `json:"created_at"`
}
|
||||
|
||||
// User is an account. PasswordHash is deliberately excluded from JSON
// serialization via the "-" tag.
type User struct {
	UserID       string    `json:"user_id"`
	Username     string    `json:"username"`
	PasswordHash string    `json:"-"` // never serialized
	IsAdmin      bool      `json:"is_admin"` // grants access to the admin area
	CreatedAt    time.Time `json:"created_at"`
}
|
||||
|
||||
// Session is a server-side login session, referenced by a session cookie.
type Session struct {
	SessionID string    `json:"session_id"`
	UserID    string    `json:"user_id"`
	CreatedAt time.Time `json:"created_at"`
	ExpiresAt time.Time `json:"expires_at"` // sessions past this time are invalid
}
|
||||
8
backend/start.sh
Executable file
8
backend/start.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
# Dev launcher: runs the backend from source with sensible local defaults.
set -e

# Apply defaults only when the variables are unset/empty, then export both.
: "${DATABASE_URL:=postgres://ralph:ralph@localhost:5432/ralph?sslmode=disable}"
: "${LISTEN_ADDR:=:8081}"
export DATABASE_URL LISTEN_ADDR

echo "Starting RALPH backend on $LISTEN_ADDR ..."
exec go run ./cmd/server
|
||||
1
backend/static-ts/.gitignore
vendored
Normal file
1
backend/static-ts/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
node_modules/
|
||||
95
backend/static-ts/autoplay.ts
Normal file
95
backend/static-ts/autoplay.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
// Coordinates a page-wide background music player with inline audio/video
// elements: audio can be "sent to background", and videos autoplay (muted)
// while in view, pausing the background music whenever they play with sound.
(function () {
    'use strict';

    /* ── Background player ───────────────────────────────────── */
    // A single detached <audio> element drives background playback; it is
    // controlled only through the floating bar created below.
    const bgAudio = new Audio();
    // True while background music is temporarily paused in favor of a video,
    // so it can be resumed when the video yields.
    let bgPlaying = false;
    let bgBar: HTMLElement | null = null;
    let bgTitle: HTMLElement | null = null;
    let bgPlayBtn: HTMLButtonElement | null = null;

    // Lazily builds the fixed control bar (title, play/pause, close).
    // Idempotent: returns immediately once the bar exists.
    function createBgBar(): void {
        if (bgBar) return;
        bgBar = document.createElement('div');
        bgBar.id = 'bg-bar';
        bgBar.innerHTML =
            '<span id="bg-title"></span>' +
            '<button id="bg-play" aria-label="Abspielen">▶</button>' +
            '<button id="bg-close" aria-label="Schließen">✕</button>';
        document.body.appendChild(bgBar);

        bgTitle = document.getElementById('bg-title');
        bgPlayBtn = document.getElementById('bg-play') as HTMLButtonElement;

        // Toggle play/pause from the bar's button.
        bgPlayBtn.addEventListener('click', function () {
            if (bgAudio.paused) void bgAudio.play(); else bgAudio.pause();
        });
        // Close hides the bar and stops playback (audio src is kept).
        document.getElementById('bg-close')?.addEventListener('click', function () {
            bgAudio.pause();
            if (bgBar) bgBar.style.display = 'none';
        });
        // Keep the play/pause glyph in sync with the audio element's state.
        bgAudio.addEventListener('play', function () { if (bgPlayBtn) bgPlayBtn.textContent = '⏸'; });
        bgAudio.addEventListener('pause', function () { if (bgPlayBtn) bgPlayBtn.textContent = '▶'; });
        bgAudio.addEventListener('ended', function () { if (bgPlayBtn) bgPlayBtn.textContent = '▶'; });
    }

    // Loads a track into the background player, shows the bar, and starts it.
    function sendToBg(src: string, title: string): void {
        createBgBar();
        if (bgBar) bgBar.style.display = 'flex';
        bgAudio.src = src;
        if (bgTitle) bgTitle.textContent = title;
        void bgAudio.play();
    }

    // Attach "♪" button to every inline audio player
    document.querySelectorAll<HTMLAudioElement>('audio.media-audio').forEach(function (a) {
        const btn = document.createElement('button');
        btn.className = 'btn-bg-music';
        btn.textContent = '♪ Hintergrundmusik';
        btn.type = 'button';
        // Prefer the element's title; fall back to the file name, then the URL.
        const title = a.title || a.src.split('/').pop() || a.src;
        btn.addEventListener('click', function () { sendToBg(a.src, title); });
        a.insertAdjacentElement('afterend', btn);
    });

    /* ── Video autoplay + coordination ──────────────────────── */
    // Play a video while at least 30% of it is visible; pause it otherwise.
    const obs = new IntersectionObserver(function (entries: IntersectionObserverEntry[]) {
        entries.forEach(function (e) {
            const v = e.target as HTMLVideoElement;
            if (e.isIntersecting) {
                void v.play();
            } else {
                v.pause();
            }
        });
    }, { threshold: 0.3 });

    document.querySelectorAll<HTMLVideoElement>('video.media-embed').forEach(function (v) {
        // Muted + playsinline so browsers allow autoplay without a gesture.
        v.muted = true;
        v.loop = true;
        v.setAttribute('playsinline', '');
        obs.observe(v);

        // User unmutes → pause background music
        v.addEventListener('volumechange', function () {
            if (!v.muted && !v.paused) {
                // Remember whether background music was actually playing.
                bgPlaying = !bgAudio.paused;
                bgAudio.pause();
            }
            // Video muted again → resume background
            if (v.muted && bgPlaying) {
                void bgAudio.play();
                bgPlaying = false;
            }
        });

        // Video pauses or ends → resume background if it was playing
        v.addEventListener('pause', function () {
            if (bgPlaying) { void bgAudio.play(); bgPlaying = false; }
        });
        v.addEventListener('ended', function () {
            if (bgPlaying) { void bgAudio.play(); bgPlaying = false; }
        });
    });

})();
|
||||
31
backend/static-ts/day.ts
Normal file
31
backend/static-ts/day.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
// GPS button
|
||||
document.getElementById('btn-gps')?.addEventListener('click', function () {
|
||||
const status = document.getElementById('gps-status') as HTMLElement;
|
||||
if (!navigator.geolocation) {
|
||||
status.textContent = '// GPS nicht verfügbar';
|
||||
return;
|
||||
}
|
||||
status.textContent = '// Standort wird ermittelt...';
|
||||
navigator.geolocation.getCurrentPosition(
|
||||
function (pos: GeolocationPosition) {
|
||||
(document.getElementById('entry-lat') as HTMLInputElement).value = pos.coords.latitude.toFixed(6);
|
||||
(document.getElementById('entry-lon') as HTMLInputElement).value = pos.coords.longitude.toFixed(6);
|
||||
status.textContent = '// Standort gesetzt (' + pos.coords.accuracy.toFixed(0) + ' m Genauigkeit)';
|
||||
},
|
||||
function (err: GeolocationPositionError) {
|
||||
status.textContent = '// Fehler: ' + err.message;
|
||||
},
|
||||
{ enableHighAccuracy: true, timeout: 10000 }
|
||||
);
|
||||
});
|
||||
|
||||
// Set current time as default
|
||||
(function () {
|
||||
const input = document.getElementById('entry-time') as HTMLInputElement | null;
|
||||
if (input && !input.value) {
|
||||
const now = new Date();
|
||||
const hh = String(now.getHours()).padStart(2, '0');
|
||||
const mm = String(now.getMinutes()).padStart(2, '0');
|
||||
input.value = hh + ':' + mm;
|
||||
}
|
||||
})();
|
||||
81
backend/static-ts/editor.ts
Normal file
81
backend/static-ts/editor.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
// Media upload editor: wires every description textarea so that pasted,
// dropped, or picked files are POSTed to /media and the returned reference
// string is inserted at the cursor.
(function () {
    'use strict';

    // Shape of the JSON answer from POST /media.
    interface UploadResponse {
        filename: string;
        mime: string;
        ref: string; // markup reference to insert into the text
    }

    // Attaches paste/drag-drop/picker upload behavior to one textarea.
    function initEditor(ta: HTMLTextAreaElement): void {
        // Uploads one file and inserts its reference; progress is mirrored
        // into the sibling .upload-status element when present.
        async function upload(file: File): Promise<void> {
            const form = new FormData();
            form.append('file', file);
            const statusEl = ta.parentElement?.querySelector<HTMLElement>('.upload-status');
            if (statusEl) statusEl.textContent = '↑ ' + file.name + ' …';
            try {
                const res = await fetch('/media', { method: 'POST', body: form });
                if (!res.ok) {
                    if (statusEl) statusEl.textContent = '✗ Fehler beim Hochladen';
                    return;
                }
                const data: UploadResponse = await res.json();
                // Surround the reference with newlines so it sits on its own line.
                insertAtCursor('\n' + data.ref + '\n');
                if (statusEl) statusEl.textContent = '';
            } catch (_e) {
                // Network failure: report, but never throw out of the handler.
                if (statusEl) statusEl.textContent = '✗ Fehler beim Hochladen';
            }
        }

        // Inserts text at the current selection (replacing it) and restores focus.
        function insertAtCursor(text: string): void {
            const start = ta.selectionStart;
            ta.value = ta.value.slice(0, start) + text + ta.value.slice(ta.selectionEnd);
            ta.selectionStart = ta.selectionEnd = start + text.length;
            ta.focus();
        }

        // Paste: catch file pastes
        ta.addEventListener('paste', function (e: ClipboardEvent) {
            const items = e.clipboardData?.items;
            if (!items) return;
            for (let i = 0; i < items.length; i++) {
                if (items[i].kind === 'file') {
                    // Only the first pasted file is handled; text pastes fall through.
                    e.preventDefault();
                    const file = items[i].getAsFile();
                    if (file) void upload(file);
                    return;
                }
            }
        });

        // Drag & Drop onto textarea
        ta.addEventListener('dragover', function (e: DragEvent) {
            // preventDefault is required to make the element a valid drop target.
            e.preventDefault();
            ta.classList.add('drag-over');
        });
        ta.addEventListener('dragleave', function () {
            ta.classList.remove('drag-over');
        });
        ta.addEventListener('drop', function (e: DragEvent) {
            e.preventDefault();
            ta.classList.remove('drag-over');
            const files = e.dataTransfer?.files;
            if (!files) return;
            // Unlike paste, a drop may carry several files — upload them all.
            for (let i = 0; i < files.length; i++) void upload(files[i]);
        });

        // File picker button
        const picker = ta.parentElement?.querySelector<HTMLButtonElement>('.media-picker');
        const input = ta.parentElement?.querySelector<HTMLInputElement>('.media-file-input');
        if (picker && input) {
            picker.addEventListener('click', function () { input.click(); });
            input.addEventListener('change', function () {
                if (!input.files) return;
                Array.from(input.files).forEach(f => void upload(f));
                // Reset so picking the same file again re-fires 'change'.
                input.value = '';
            });
        }
    }

    document.querySelectorAll<HTMLTextAreaElement>('textarea[name="description"]').forEach(initEditor);
})();
|
||||
30
backend/static-ts/package-lock.json
generated
Normal file
30
backend/static-ts/package-lock.json
generated
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "static-ts",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "static-ts",
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"typescript": "^6.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-6.0.2.tgz",
|
||||
"integrity": "sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
17
backend/static-ts/package.json
Normal file
17
backend/static-ts/package.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"name": "static-ts",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"watch": "tsc --watch"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"type": "commonjs",
|
||||
"devDependencies": {
|
||||
"typescript": "^6.0.2"
|
||||
}
|
||||
}
|
||||
13
backend/static-ts/tsconfig.json
Normal file
13
backend/static-ts/tsconfig.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2017",
|
||||
"lib": ["ES2017", "DOM"],
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"module": "None",
|
||||
"ignoreDeprecations": "6.0",
|
||||
"outDir": "../internal/api/static"
|
||||
},
|
||||
"include": ["./*.ts"]
|
||||
}
|
||||
12
backend/stop.sh
Executable file
12
backend/stop.sh
Executable file
@@ -0,0 +1,12 @@
|
||||
#!/bin/bash
# Stops the dev server started by start.sh on $LISTEN_ADDR (default :8081).
PORT="${LISTEN_ADDR:-:8081}"
PORT="${PORT#:}"  # strip the leading colon to get a bare port number

# lsof exits non-zero when nothing listens, leaving PID empty (no set -e here).
PID=$(lsof -ti:"$PORT")
if [ -n "$PID" ]; then
  kill $PID  # deliberately unquoted: lsof may return several PIDs
  echo "Server stopped (port $PORT, pid $PID)"
else
  echo "Server is not running on port $PORT"
  exit 0
fi
|
||||
@@ -178,16 +178,17 @@ backend/
|
||||
│ ├── trackpoints.go UpsertBatch, ListByDate, ListDays, EnsureDevice
|
||||
│ ├── stops.go ListByDate
|
||||
│ ├── suggestions.go ListByDate
|
||||
│ └── journal.go CRUD Journal Entries + Images
|
||||
│ ├── journal.go CRUD Journal Entries + Images; ListPublic, ListByUser
|
||||
│ └── users.go ListUsers, DeleteUser
|
||||
├── auth/
|
||||
│ └── auth.go HashPassword, VerifyPassword, Login, GetSession, Logout
|
||||
│ └── auth.go HashPassword, VerifyPassword, Login, Register, GetSession, Logout
|
||||
└── api/
|
||||
├── router.go chi Routing, Middleware-Gruppen
|
||||
├── middleware.go RequireAuth (Session Cookie → Context)
|
||||
├── middleware.go RequireAuth, requireAdmin (Session Cookie → Context)
|
||||
├── ingest.go HandleSingleTrackpoint, HandleBatchTrackpoints
|
||||
├── query.go HandleListDays, HandleListTrackpoints, Stops, Suggestions
|
||||
├── webui.go Server-side rendered Web UI (Go Templates)
|
||||
├── journal.go Journal Entry Endpoints
|
||||
├── webui.go Web UI: Feed, Register, Days, Admin-Handlers
|
||||
├── journal.go Journal Entry Endpoints (inkl. visibility + hashtags)
|
||||
└── response.go writeJSON, writeError helpers
|
||||
```
|
||||
|
||||
@@ -365,6 +366,101 @@ Schema wird beim API-Start automatisch initialisiert (keine separate Migration n
|
||||
|
||||
---
|
||||
|
||||
## 7b. Datenbankschema
|
||||
|
||||
```mermaid
|
||||
erDiagram
|
||||
users {
|
||||
TEXT user_id PK
|
||||
TEXT username UK
|
||||
TEXT password_hash
|
||||
BOOLEAN is_admin
|
||||
TIMESTAMPTZ created_at
|
||||
}
|
||||
sessions {
|
||||
TEXT session_id PK
|
||||
TEXT user_id FK
|
||||
TIMESTAMPTZ created_at
|
||||
TIMESTAMPTZ expires_at
|
||||
}
|
||||
devices {
|
||||
TEXT device_id PK
|
||||
TEXT user_id FK
|
||||
TIMESTAMPTZ created_at
|
||||
}
|
||||
trackpoints {
|
||||
BIGSERIAL id PK
|
||||
TEXT event_id
|
||||
TEXT device_id FK
|
||||
TEXT trip_id
|
||||
TIMESTAMPTZ ts
|
||||
DOUBLE lat
|
||||
DOUBLE lon
|
||||
TEXT source
|
||||
TEXT note
|
||||
}
|
||||
stops {
|
||||
TEXT stop_id PK
|
||||
TEXT device_id FK
|
||||
TEXT trip_id
|
||||
TIMESTAMPTZ start_ts
|
||||
TIMESTAMPTZ end_ts
|
||||
DOUBLE center_lat
|
||||
DOUBLE center_lon
|
||||
INT duration_s
|
||||
TEXT place_label
|
||||
}
|
||||
suggestions {
|
||||
TEXT suggestion_id PK
|
||||
TEXT stop_id FK
|
||||
TEXT type
|
||||
TEXT title
|
||||
TEXT text
|
||||
TIMESTAMPTZ created_at
|
||||
TIMESTAMPTZ dismissed_at
|
||||
}
|
||||
journal_entries {
|
||||
TEXT entry_id PK
|
||||
TEXT user_id FK
|
||||
DATE entry_date
|
||||
TIME entry_time
|
||||
TEXT title
|
||||
TEXT description
|
||||
DOUBLE lat
|
||||
DOUBLE lon
|
||||
TEXT visibility
|
||||
TEXT[] hashtags
|
||||
TIMESTAMPTZ created_at
|
||||
}
|
||||
journal_images {
|
||||
TEXT image_id PK
|
||||
TEXT entry_id FK
|
||||
TEXT filename
|
||||
TEXT original_name
|
||||
TEXT mime_type
|
||||
BIGINT size_bytes
|
||||
TIMESTAMPTZ created_at
|
||||
}
|
||||
|
||||
users ||--o{ sessions : has
|
||||
users ||--o{ devices : owns
|
||||
users ||--o{ journal_entries : writes
|
||||
devices ||--o{ trackpoints : records
|
||||
stops ||--o{ suggestions : generates
|
||||
journal_entries ||--o{ journal_images : contains
|
||||
```
|
||||
|
||||
**Wichtige Felder:**
|
||||
|
||||
| Tabelle | Feld | Bedeutung |
|
||||
|---------|------|-----------|
|
||||
| `users` | `is_admin` | Admin-Flag für Zugang zum Admin-Bereich |
|
||||
| `journal_entries` | `visibility` | `public` = im öffentlichen Feed sichtbar; `private` = nur für Autor |
|
||||
| `journal_entries` | `hashtags` | Kommagetrennte Tags als `TEXT[]`-Array |
|
||||
| `trackpoints` | `(device_id, event_id)` | UNIQUE-Constraint für Idempotenz |
|
||||
|
||||
---
|
||||
|
||||
## 8. Querschnittskonzepte
|
||||
|
||||
### Authentifizierung & Sessions (REQ-AUTH-01, REQ-AUTH-02, DEC-AUTH-01)
|
||||
|
||||
79
doc/deployment.md
Normal file
79
doc/deployment.md
Normal file
@@ -0,0 +1,79 @@
|
||||
# Deployment
|
||||
|
||||
## Voraussetzungen
|
||||
|
||||
- Synology NAS mit laufendem shared PostgreSQL Stack (`/volume2/docker/shared/`)
|
||||
- act_runner registriert und online
|
||||
- Siehe Infra-Repo für Setup-Details
|
||||
|
||||
---
|
||||
|
||||
## Datenbank einrichten (einmalig)
|
||||
|
||||
```bash
|
||||
sudo docker exec -it shared-postgres-1 psql -U postgres
|
||||
```
|
||||
|
||||
```sql
|
||||
CREATE DATABASE pamietnik;
|
||||
CREATE USER pamietnik WITH PASSWORD '<passwort>';
|
||||
GRANT ALL PRIVILEGES ON DATABASE pamietnik TO pamietnik;
|
||||
GRANT ALL ON SCHEMA public TO pamietnik;
|
||||
\q
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Gitea Secrets & Variables
|
||||
|
||||
**Repository → Einstellungen → Actions → Secrets:**
|
||||
|
||||
| Secret | Wert |
|--------|------|
| `DB_PASSWORD` | Passwort des `pamietnik` DB-Users |
| `ADMIN_PASSWORD` | Passwort des initialen Admin-Users (wird vom Workflow in die `.env` geschrieben) |
|
||||
|
||||
**Repository → Einstellungen → Actions → Variables:**
|
||||
|
||||
| Variable | Wert |
|----------|------|
| `DEPLOY_DIR` | `/volume2/docker/pamietnik` |
| `DB_USER` | `pamietnik` |
| `DB_NAME` | `pamietnik` |
| `APP_PORT` | `9050` |
| `ADMIN_USER` | Benutzername des initialen Admin-Users (wird vom Workflow in die `.env` geschrieben) |
|
||||
|
||||
---
|
||||
|
||||
## Deploy
|
||||
|
||||
Push auf `main` triggert automatisch den Workflow (`.gitea/workflows/deploy.yml`):
|
||||
|
||||
1. Code nach `/volume2/docker/pamietnik/` klonen/pullen
|
||||
2. `.env` mit DB-Credentials schreiben
|
||||
3. `docker compose up --build -d`
|
||||
4. Health check auf `/healthz`
|
||||
|
||||
App erreichbar unter: `http://192.168.1.4:9050`
|
||||
|
||||
---
|
||||
|
||||
## Ersten User anlegen

Sind `ADMIN_USER` und `ADMIN_PASSWORD` gesetzt, wird beim ersten Start automatisch ein Admin-User angelegt, sofern noch keine User existieren. Alternativ manuell:

```bash
sudo docker exec -it pamietnik-api-1 /createuser
```
|
||||
|
||||
---
|
||||
|
||||
## Logs & Wartung
|
||||
|
||||
```bash
|
||||
# Logs
|
||||
sudo docker compose -f /volume2/docker/pamietnik/docker-compose.yml logs -f
|
||||
|
||||
# Neustart
|
||||
sudo docker compose -f /volume2/docker/pamietnik/docker-compose.yml restart
|
||||
|
||||
# Backup
|
||||
sudo docker exec shared-postgres-1 pg_dump -U postgres pamietnik \
|
||||
> /volume2/docker/shared/backup_$(date +%Y%m%d)_pamietnik.sql
|
||||
```
|
||||
@@ -1,31 +1,18 @@
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
environment:
|
||||
POSTGRES_USER: ${DB_USER:-pamietnik}
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD:?DB_PASSWORD is required}
|
||||
POSTGRES_DB: ${DB_NAME:-pamietnik}
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-pamietnik}"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
api:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "9050:8080"
|
||||
- "${APP_PORT:-9050}:8080"
|
||||
extra_hosts:
|
||||
- "host-gateway:host-gateway"
|
||||
environment:
|
||||
DATABASE_URL: postgres://${DB_USER:-pamietnik}:${DB_PASSWORD:?DB_PASSWORD is required}@postgres:5432/${DB_NAME:-pamietnik}?sslmode=disable
|
||||
DATABASE_URL: postgres://${DB_USER:-pamietnik}:${DB_PASSWORD:?DB_PASSWORD is required}@host-gateway:5433/${DB_NAME:-pamietnik}?sslmode=disable
|
||||
LISTEN_ADDR: :8080
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
restart: unless-stopped
|
||||
|
||||
UPLOAD_DIR: /uploads
|
||||
ADMIN_USER: ${ADMIN_USER:-}
|
||||
ADMIN_PASSWORD: ${ADMIN_PASSWORD:-}
|
||||
volumes:
|
||||
pgdata:
|
||||
- /volume2/docker/pamietnik/uploads:/uploads
|
||||
restart: unless-stopped
|
||||
|
||||
465
openapi.yaml
Normal file
465
openapi.yaml
Normal file
@@ -0,0 +1,465 @@
|
||||
openapi: 3.1.0
|
||||
|
||||
info:
|
||||
title: Pamietnik API
|
||||
version: 0.1.0
|
||||
description: Life & travel journal — REST API for trackpoint ingest and data query.
|
||||
|
||||
servers:
|
||||
- url: http://192.168.1.4:9050
|
||||
description: NAS (local)
|
||||
- url: http://localhost:9050
|
||||
description: Local dev
|
||||
|
||||
security:
|
||||
- cookieAuth: []
|
||||
|
||||
paths:
|
||||
|
||||
/healthz:
|
||||
get:
|
||||
summary: Health check
|
||||
security: []
|
||||
responses:
|
||||
'200':
|
||||
description: Server is up
|
||||
content:
|
||||
text/plain:
|
||||
example: ok
|
||||
|
||||
/readyz:
|
||||
get:
|
||||
summary: Readiness check
|
||||
security: []
|
||||
responses:
|
||||
'200':
|
||||
description: Server is ready
|
||||
|
||||
  # --- Ingest ---

  /v1/trackpoints:
    post:
      summary: Ingest single trackpoint
      tags: [Ingest]
      requestBody:
        required: true
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/TrackpointInput'
      responses:
        '200':
          description: Accepted (or duplicate — idempotent)
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/BatchResponse'
        '400':
          $ref: '#/components/responses/BadRequest'
        '401':
          $ref: '#/components/responses/Unauthorized'
    # GET merged under the same path item: a YAML mapping may not declare
    # the key /v1/trackpoints twice (it originally appeared again in the
    # Query section, which is invalid).
    get:
      summary: List trackpoints for a date
      tags: [Query]
      parameters:
        - name: date
          in: query
          required: true
          schema:
            type: string
            format: date
            example: '2026-04-07'
      responses:
        '200':
          description: List of trackpoints
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: '#/components/schemas/Trackpoint'
        '400':
          $ref: '#/components/responses/BadRequest'
        '401':
          $ref: '#/components/responses/Unauthorized'

  /v1/trackpoints:batch:
    post:
      summary: Ingest batch of trackpoints (max 500)
      tags: [Ingest]
      requestBody:
        required: true
        content:
          application/json:
            schema:
              type: array
              items:
                $ref: '#/components/schemas/TrackpointInput'
              maxItems: 500
      responses:
        '200':
          description: Processed — check accepted_ids and rejected for details
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/BatchResponse'
        '400':
          $ref: '#/components/responses/BadRequest'
        '401':
          $ref: '#/components/responses/Unauthorized'

  # --- Query ---

  /v1/days:
    get:
      summary: List days with trackpoint activity
      tags: [Query]
      parameters:
        - name: from
          in: query
          required: true
          schema:
            type: string
            format: date
            example: '2026-01-01'
        - name: to
          in: query
          required: true
          schema:
            type: string
            format: date
            example: '2026-12-31'
      responses:
        '200':
          description: List of days
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: '#/components/schemas/DaySummary'
        '400':
          $ref: '#/components/responses/BadRequest'
        '401':
          $ref: '#/components/responses/Unauthorized'
|
||||
|
||||
/v1/stops:
|
||||
get:
|
||||
summary: List stops for a date
|
||||
tags: [Query]
|
||||
parameters:
|
||||
- name: date
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: date
|
||||
example: '2026-04-07'
|
||||
responses:
|
||||
'200':
|
||||
description: List of stops
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Stop'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
|
||||
/v1/suggestions:
|
||||
get:
|
||||
summary: List suggestions for a date
|
||||
tags: [Query]
|
||||
parameters:
|
||||
- name: date
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: date
|
||||
example: '2026-04-07'
|
||||
responses:
|
||||
'200':
|
||||
description: List of suggestions
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Suggestion'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
|
||||
# --- Journal ---
|
||||
|
||||
/entries:
|
||||
post:
|
||||
summary: Create journal entry with optional images
|
||||
tags: [Journal]
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
multipart/form-data:
|
||||
schema:
|
||||
type: object
|
||||
required: [date, time]
|
||||
properties:
|
||||
date:
|
||||
type: string
|
||||
format: date
|
||||
example: '2026-04-07'
|
||||
time:
|
||||
type: string
|
||||
example: '14:30'
|
||||
title:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
lat:
|
||||
type: number
|
||||
format: double
|
||||
lon:
|
||||
type: number
|
||||
format: double
|
||||
images:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
format: binary
|
||||
description: JPEG, PNG, WebP or HEIC — max 10 MB each, 32 MB total
|
||||
responses:
|
||||
'201':
|
||||
description: Entry created
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/JournalEntry'
|
||||
'400':
|
||||
$ref: '#/components/responses/BadRequest'
|
||||
'401':
|
||||
$ref: '#/components/responses/Unauthorized'
|
||||
|
||||
components:
|
||||
|
||||
securitySchemes:
|
||||
cookieAuth:
|
||||
type: apiKey
|
||||
in: cookie
|
||||
name: session
|
||||
|
||||
responses:
|
||||
BadRequest:
|
||||
description: Bad request
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
Unauthorized:
|
||||
description: Not authenticated
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Error'
|
||||
|
||||
schemas:
|
||||
|
||||
TrackpointInput:
|
||||
type: object
|
||||
required: [event_id, device_id, trip_id, timestamp, lat, lon]
|
||||
properties:
|
||||
event_id:
|
||||
type: string
|
||||
format: uuid
|
||||
description: Client-generated UUID — used for idempotency
|
||||
device_id:
|
||||
type: string
|
||||
trip_id:
|
||||
type: string
|
||||
timestamp:
|
||||
type: string
|
||||
format: date-time
|
||||
example: '2026-04-07T12:00:00Z'
|
||||
lat:
|
||||
type: number
|
||||
format: double
|
||||
minimum: -90
|
||||
maximum: 90
|
||||
lon:
|
||||
type: number
|
||||
format: double
|
||||
minimum: -180
|
||||
maximum: 180
|
||||
source:
|
||||
type: string
|
||||
enum: [gps, manual]
|
||||
default: gps
|
||||
note:
|
||||
type: string
|
||||
accuracy_m:
|
||||
type: number
|
||||
format: double
|
||||
speed_mps:
|
||||
type: number
|
||||
format: double
|
||||
bearing_deg:
|
||||
type: number
|
||||
format: double
|
||||
altitude_m:
|
||||
type: number
|
||||
format: double
|
||||
|
||||
Trackpoint:
|
||||
allOf:
|
||||
- $ref: '#/components/schemas/TrackpointInput'
|
||||
|
||||
BatchResponse:
|
||||
type: object
|
||||
properties:
|
||||
server_time:
|
||||
type: string
|
||||
format: date-time
|
||||
accepted_ids:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
rejected:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/RejectedItem'
|
||||
|
||||
RejectedItem:
|
||||
type: object
|
||||
properties:
|
||||
event_id:
|
||||
type: string
|
||||
code:
|
||||
type: string
|
||||
enum: [VALIDATION_ERROR, DB_ERROR, INVALID_TIMESTAMP]
|
||||
message:
|
||||
type: string
|
||||
|
||||
DaySummary:
|
||||
type: object
|
||||
properties:
|
||||
date:
|
||||
type: string
|
||||
format: date
|
||||
count:
|
||||
type: integer
|
||||
first_ts:
|
||||
type: string
|
||||
format: date-time
|
||||
last_ts:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
Stop:
|
||||
type: object
|
||||
properties:
|
||||
stop_id:
|
||||
type: string
|
||||
device_id:
|
||||
type: string
|
||||
trip_id:
|
||||
type: string
|
||||
start_ts:
|
||||
type: string
|
||||
format: date-time
|
||||
end_ts:
|
||||
type: string
|
||||
format: date-time
|
||||
center_lat:
|
||||
type: number
|
||||
format: double
|
||||
center_lon:
|
||||
type: number
|
||||
format: double
|
||||
duration_s:
|
||||
type: integer
|
||||
place_label:
|
||||
type: string
|
||||
place_details:
|
||||
type: object
|
||||
|
||||
Suggestion:
|
||||
type: object
|
||||
properties:
|
||||
suggestion_id:
|
||||
type: string
|
||||
stop_id:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
enum: [highlight, name_place, add_note]
|
||||
title:
|
||||
type: string
|
||||
text:
|
||||
type: string
|
||||
created_at:
|
||||
type: string
|
||||
format: date-time
|
||||
dismissed_at:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
JournalEntry:
|
||||
type: object
|
||||
properties:
|
||||
entry_id:
|
||||
type: string
|
||||
user_id:
|
||||
type: string
|
||||
entry_date:
|
||||
type: string
|
||||
format: date
|
||||
entry_time:
|
||||
type: string
|
||||
example: '14:30'
|
||||
title:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
lat:
|
||||
type: number
|
||||
format: double
|
||||
lon:
|
||||
type: number
|
||||
format: double
|
||||
created_at:
|
||||
type: string
|
||||
format: date-time
|
||||
images:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/JournalImage'
|
||||
|
||||
JournalImage:
|
||||
type: object
|
||||
properties:
|
||||
image_id:
|
||||
type: string
|
||||
entry_id:
|
||||
type: string
|
||||
filename:
|
||||
type: string
|
||||
original_name:
|
||||
type: string
|
||||
mime_type:
|
||||
type: string
|
||||
size_bytes:
|
||||
type: integer
|
||||
format: int64
|
||||
created_at:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
Error:
|
||||
type: object
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
message:
|
||||
type: string
|
||||
Reference in New Issue
Block a user