Compare commits
44 Commits
12e793aa7e
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 435aa88e29 | |||
| 7a5321c097 | |||
| 2b05ff8b02 | |||
| 5d8ef96d4c | |||
| fee686a9f3 | |||
| b2d1e2bdc4 | |||
| 2d57c75d2f | |||
| d8ba64da55 | |||
| 29bbaead86 | |||
| d0ac4f1d91 | |||
| 4381a6681d | |||
| 360725c93f | |||
| dd607a04a1 | |||
| 15c419b7bf | |||
| 6f9fd5cba3 | |||
| 8c15c932b6 | |||
| c62188fda6 | |||
| 810e81b323 | |||
| 7585e43b52 | |||
| 47570257bd | |||
| 7f51c60062 | |||
| 12784462f9 | |||
| 1a5e448e4e | |||
| 5c6c871bb6 | |||
| 4ea8e56485 | |||
| 57259c2c2f | |||
| 32a2634739 | |||
| 2f610633c4 | |||
| 8cb639c1bd | |||
| e62cffc10c | |||
| b3e6f89bd2 | |||
| 64ea0a9a5d | |||
| cd218a55fe | |||
| a46e296dc3 | |||
| 6ed3b4bbe6 | |||
| cf9fa91238 | |||
| 04b2a0bcb8 | |||
| d390bdac0d | |||
| d49a9636e5 | |||
| 14a7250eef | |||
| 342fc06ce7 | |||
| 9195f143a2 | |||
| be0b3a5a5a | |||
| 3a2362b7fd |
@@ -1,16 +0,0 @@
|
|||||||
{
|
|
||||||
"permissions": {
|
|
||||||
"allow": [
|
|
||||||
"Bash(npm create:*)",
|
|
||||||
"Bash(npm install:*)",
|
|
||||||
"Bash(npm run build:*)",
|
|
||||||
"Bash(python -c:*)",
|
|
||||||
"Bash(npx vite build:*)",
|
|
||||||
"Bash(wc:*)",
|
|
||||||
"Bash(ls:*)",
|
|
||||||
"Bash(node -c:*)",
|
|
||||||
"Bash(npm run lint:*)",
|
|
||||||
"Bash(python:*)"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
18
.env.example
@@ -13,7 +13,25 @@ MQTT_BROKER_PORT=1883
|
|||||||
MQTT_ADMIN_USERNAME=admin
|
MQTT_ADMIN_USERNAME=admin
|
||||||
MQTT_ADMIN_PASSWORD=your-mqtt-admin-password
|
MQTT_ADMIN_PASSWORD=your-mqtt-admin-password
|
||||||
MOSQUITTO_PASSWORD_FILE=/etc/mosquitto/passwd
|
MOSQUITTO_PASSWORD_FILE=/etc/mosquitto/passwd
|
||||||
|
# Must be unique per running instance (VPS vs local dev)
|
||||||
|
MQTT_CLIENT_ID=bellsystems-admin-panel
|
||||||
|
# HMAC secret used to derive per-device MQTT passwords (must match firmware)
|
||||||
|
MQTT_SECRET=change-me-in-production
|
||||||
|
|
||||||
# App
|
# App
|
||||||
BACKEND_CORS_ORIGINS=["http://localhost:5173"]
|
BACKEND_CORS_ORIGINS=["http://localhost:5173"]
|
||||||
DEBUG=true
|
DEBUG=true
|
||||||
|
# Port nginx binds on the host (use 90 on VPS if 80 is taken)
|
||||||
|
NGINX_PORT=80
|
||||||
|
|
||||||
|
# Local file storage (override if you want to store data elsewhere)
|
||||||
|
SQLITE_DB_PATH=./data/database.db
|
||||||
|
BUILT_MELODIES_STORAGE_PATH=./storage/built_melodies
|
||||||
|
FIRMWARE_STORAGE_PATH=./storage/firmware
|
||||||
|
|
||||||
|
# Nextcloud WebDAV
|
||||||
|
NEXTCLOUD_URL=https://cloud.example.com
|
||||||
|
NEXTCLOUD_USERNAME=service-account@example.com
|
||||||
|
NEXTCLOUD_PASSWORD=your-password-here
|
||||||
|
NEXTCLOUD_DAV_USER=admin
|
||||||
|
NEXTCLOUD_BASE_PATH=BellSystems/Console
|
||||||
|
|||||||
22
.gitignore
vendored
@@ -1,7 +1,22 @@
|
|||||||
|
# Auto-deploy generated files
|
||||||
|
deploy.sh
|
||||||
|
deploy.log
|
||||||
|
.deploy-trigger
|
||||||
|
|
||||||
# Secrets
|
# Secrets
|
||||||
.env
|
.env
|
||||||
firebase-service-account.json
|
firebase-service-account.json
|
||||||
|
|
||||||
|
# Persistent runtime data (lives outside docker, not in git)
|
||||||
|
/data/*
|
||||||
|
!/data/.gitkeep
|
||||||
|
!/data/built_melodies/.gitkeep
|
||||||
|
|
||||||
|
# SQLite databases
|
||||||
|
*.db
|
||||||
|
*.db-shm
|
||||||
|
*.db-wal
|
||||||
|
|
||||||
# Python
|
# Python
|
||||||
__pycache__/
|
__pycache__/
|
||||||
*.pyc
|
*.pyc
|
||||||
@@ -20,4 +35,9 @@ dist/
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
|
|
||||||
MAIN-APP-REFERENCE/
|
.MAIN-APP-REFERENCE/
|
||||||
|
|
||||||
|
.project-vesper-plan.md
|
||||||
|
|
||||||
|
# claude
|
||||||
|
.claude/
|
||||||
395
AUTOMATION_ENGINE_STRATEGY.md
Normal file
@@ -0,0 +1,395 @@
|
|||||||
|
# BellSystems CP — Automation & Notification Engine Strategy
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This document defines the architecture and implementation plan for a three-tier intelligence layer built on top of the existing BellSystems Control Panel. The system consists of:
|
||||||
|
|
||||||
|
1. **Event Logs** — passive, timestamped record of notable system events
|
||||||
|
2. **Notifications** — real-time or near-real-time alerts surfaced in the UI
|
||||||
|
3. **Automation Rules** — trigger → condition → action pipelines, configurable via UI
|
||||||
|
|
||||||
|
The existing tech stack is unchanged: **FastAPI + SQLite (aiosqlite) + Firestore + React**. Everything new slots in as additional tables in `mqtt_data.db`, new backend modules, and new frontend pages/components.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌──────────────────────────────────────────────────┐
|
||||||
|
│ Scheduler Loop (runs inside existing FastAPI │
|
||||||
|
│ startup, alongside email_sync_loop) │
|
||||||
|
│ │
|
||||||
|
│ Every 60s: evaluate_rules() │
|
||||||
|
│ ↓ │
|
||||||
|
│ Rules Engine │
|
||||||
|
│ → loads enabled rules from DB │
|
||||||
|
│ → evaluates conditions against live data │
|
||||||
|
│ → fires Action Executor on match │
|
||||||
|
│ │
|
||||||
|
│ Action Executor │
|
||||||
|
│ → create_event_log() │
|
||||||
|
│ → create_notification() │
|
||||||
|
│ → send_email() (existing) │
|
||||||
|
│ → mqtt_publish_command() (existing) │
|
||||||
|
│ → update_field() │
|
||||||
|
└──────────────────────────────────────────────────┘
|
||||||
|
↕ REST / WebSocket
|
||||||
|
┌──────────────────────────────────────────────────┐
|
||||||
|
│ React Frontend │
|
||||||
|
│ - Bell icon in Header (unread count badge) │
|
||||||
|
│ - Notifications dropdown/panel │
|
||||||
|
│ - /automations page (rule CRUD) │
|
||||||
|
│ - Event Log viewer (filterable) │
|
||||||
|
└──────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Database Schema (additions to `mqtt_data.db`)
|
||||||
|
|
||||||
|
### `event_log`
|
||||||
|
Permanent, append-only record of things that happened.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE IF NOT EXISTS event_log (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
category TEXT NOT NULL, -- 'device' | 'crm' | 'quotation' | 'user' | 'system'
|
||||||
|
entity_type TEXT, -- 'device' | 'customer' | 'quotation' | 'user'
|
||||||
|
entity_id TEXT, -- the ID of the affected record
|
||||||
|
title TEXT NOT NULL,
|
||||||
|
detail TEXT,
|
||||||
|
severity TEXT NOT NULL DEFAULT 'info', -- 'info' | 'warning' | 'error'
|
||||||
|
rule_id TEXT, -- which automation rule triggered this (nullable)
|
||||||
|
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_event_log_category ON event_log(category, created_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_event_log_entity ON event_log(entity_type, entity_id);
|
||||||
|
```
|
||||||
|
|
||||||
|
### `notifications`
|
||||||
|
Short-lived, user-facing alerts. Cleared once read or after TTL.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE IF NOT EXISTS notifications (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
title TEXT NOT NULL,
|
||||||
|
body TEXT,
|
||||||
|
link TEXT, -- optional frontend route, e.g. "/crm/customers/abc123"
|
||||||
|
severity TEXT NOT NULL DEFAULT 'info', -- 'info' | 'warning' | 'error' | 'success'
|
||||||
|
is_read INTEGER NOT NULL DEFAULT 0,
|
||||||
|
rule_id TEXT,
|
||||||
|
entity_type TEXT,
|
||||||
|
entity_id TEXT,
|
||||||
|
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_notifications_read ON notifications(is_read, created_at);
|
||||||
|
```
|
||||||
|
|
||||||
|
### `automation_rules`
|
||||||
|
Stores user-defined rules. Evaluated by the scheduler.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE IF NOT EXISTS automation_rules (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
description TEXT,
|
||||||
|
enabled INTEGER NOT NULL DEFAULT 1,
|
||||||
|
trigger_type TEXT NOT NULL, -- 'schedule' | 'mqtt_alert' | 'email_received'
|
||||||
|
trigger_config TEXT NOT NULL DEFAULT '{}', -- JSON
|
||||||
|
conditions TEXT NOT NULL DEFAULT '[]', -- JSON array of condition objects
|
||||||
|
actions TEXT NOT NULL DEFAULT '[]', -- JSON array of action objects
|
||||||
|
cooldown_hours REAL NOT NULL DEFAULT 0, -- min hours between firing on same entity
|
||||||
|
last_run_at TEXT,
|
||||||
|
run_count INTEGER NOT NULL DEFAULT 0,
|
||||||
|
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||||
|
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
### `automation_run_log`
|
||||||
|
Deduplication and audit trail for rule executions.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE IF NOT EXISTS automation_run_log (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
rule_id TEXT NOT NULL,
|
||||||
|
entity_type TEXT,
|
||||||
|
entity_id TEXT,
|
||||||
|
status TEXT NOT NULL, -- 'fired' | 'skipped_cooldown' | 'error'
|
||||||
|
detail TEXT,
|
||||||
|
fired_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||||
|
);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_run_log_rule ON automation_run_log(rule_id, fired_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_run_log_entity ON automation_run_log(entity_type, entity_id, fired_at);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Backend Module: `automation/`
|
||||||
|
|
||||||
|
New module at `backend/automation/`, registered in `main.py`.
|
||||||
|
|
||||||
|
```
|
||||||
|
backend/automation/
|
||||||
|
├── __init__.py
|
||||||
|
├── router.py # CRUD for rules, event_log GET, notifications GET/PATCH
|
||||||
|
├── models.py # Pydantic schemas for rules, conditions, actions
|
||||||
|
├── engine.py # evaluate_rules(), condition evaluators, action executors
|
||||||
|
├── scheduler.py # automation_loop() async task, wired into main.py startup
|
||||||
|
└── database.py # DB helpers for all 4 new tables
|
||||||
|
```
|
||||||
|
|
||||||
|
### Wiring into `main.py`
|
||||||
|
|
||||||
|
```python
|
||||||
|
from automation.router import router as automation_router
|
||||||
|
from automation.scheduler import automation_loop
|
||||||
|
|
||||||
|
app.include_router(automation_router)
|
||||||
|
|
||||||
|
# In startup():
|
||||||
|
asyncio.create_task(automation_loop())
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Rule Object Structure (JSON, stored in DB)
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "rule_abc123",
|
||||||
|
"name": "Quotation follow-up after 7 days",
|
||||||
|
"enabled": true,
|
||||||
|
"trigger_type": "schedule",
|
||||||
|
"trigger_config": { "interval_hours": 24 },
|
||||||
|
"conditions": [
|
||||||
|
{ "entity": "quotation", "field": "status", "op": "eq", "value": "sent" },
|
||||||
|
{ "entity": "quotation", "field": "days_since_updated", "op": "gte", "value": 7 },
|
||||||
|
{ "entity": "quotation", "field": "has_reply", "op": "eq", "value": false }
|
||||||
|
],
|
||||||
|
"actions": [
|
||||||
|
{
|
||||||
|
"type": "send_email",
|
||||||
|
"template_key": "quotation_followup",
|
||||||
|
"to": "{{quotation.client_email}}",
|
||||||
|
"subject": "Following up on Quotation {{quotation.quotation_number}}",
|
||||||
|
"body": "Hi {{customer.name}}, did you have a chance to review our quotation?"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "create_notification",
|
||||||
|
"title": "Follow-up sent to {{customer.name}}",
|
||||||
|
"link": "/crm/customers/{{quotation.customer_id}}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "create_event_log",
|
||||||
|
"category": "quotation",
|
||||||
|
"severity": "info",
|
||||||
|
"title": "Auto follow-up sent for {{quotation.quotation_number}}"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"cooldown_hours": 168
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Supported Trigger Types
|
||||||
|
|
||||||
|
| Trigger | How it works |
|
||||||
|
|---|---|
|
||||||
|
| `schedule` | Evaluated every N hours by the background loop |
|
||||||
|
| `mqtt_alert` | Fires immediately when `_handle_alerts()` in `mqtt/logger.py` upserts an alert — hook into that function |
|
||||||
|
| `email_received` | Fires inside `sync_emails()` in `crm/email_sync.py` after a new inbound email is stored |
|
||||||
|
|
||||||
|
> **Note:** `mqtt_alert` and `email_received` triggers bypass the scheduler loop — they are called directly from the relevant handler functions, giving near-real-time response.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Supported Condition Operators
|
||||||
|
|
||||||
|
| op | Meaning |
|
||||||
|
|---|---|
|
||||||
|
| `eq` | equals |
|
||||||
|
| `neq` | not equals |
|
||||||
|
| `gt` / `gte` / `lt` / `lte` | numeric comparisons |
|
||||||
|
| `contains` | string contains |
|
||||||
|
| `is_null` / `not_null` | field presence |
|
||||||
|
| `days_since` | computed: (now - field_datetime) in days |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Supported Action Types
|
||||||
|
|
||||||
|
| Action | What it does | Notes |
|
||||||
|
|---|---|---|
|
||||||
|
| `create_event_log` | Writes to `event_log` table | Always safe to fire |
|
||||||
|
| `create_notification` | Writes to `notifications` table | Surfaces in UI bell icon |
|
||||||
|
| `send_email` | Calls existing `crm.email_sync.send_email()` | Uses existing mail accounts |
|
||||||
|
| `update_field` | Updates a field on an entity in DB/Firestore | Use carefully — define allowed fields explicitly |
|
||||||
|
| `mqtt_publish` | Calls `mqtt_manager.publish_command()` | For device auto-actions |
|
||||||
|
| `webhook` | HTTP POST to an external URL | Future / optional |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Notification System (Frontend)
|
||||||
|
|
||||||
|
### Bell Icon in Header
|
||||||
|
|
||||||
|
- Polling endpoint: `GET /api/notifications?unread=true&limit=20`
|
||||||
|
- Poll interval: 30 seconds (or switch to WebSocket push — the WS infrastructure already exists via `mqtt_manager`)
|
||||||
|
- Badge shows unread count
|
||||||
|
- Click opens a dropdown panel listing recent notifications with title, time, severity color, and optional link
|
||||||
|
|
||||||
|
### Notification Panel
|
||||||
|
- Mark as read: `PATCH /api/notifications/{id}/read`
|
||||||
|
- Mark all read: `PATCH /api/notifications/read-all`
|
||||||
|
- Link field navigates to the relevant page on click
|
||||||
|
|
||||||
|
### Toast Popups (optional, Phase 3 polish)
|
||||||
|
- Triggered by polling detecting new unread notifications since last check
|
||||||
|
- Use an existing toast component if one exists, otherwise add a lightweight one
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Automation Rules UI (`/automations`)
|
||||||
|
|
||||||
|
A new sidebar entry under Settings (sysadmin/admin only).
|
||||||
|
|
||||||
|
### Rule List Page
|
||||||
|
- Table: name, enabled toggle, trigger type, last run, run count, edit/delete
|
||||||
|
- "New Rule" button
|
||||||
|
|
||||||
|
### Rule Editor (modal or full page)
|
||||||
|
- **Name & description** — free text
|
||||||
|
- **Trigger** — dropdown: Schedule / MQTT Alert / Email Received
|
||||||
|
- Schedule: interval hours input
|
||||||
|
- MQTT Alert: subsystem filter (optional)
|
||||||
|
- Email Received: from address filter (optional)
|
||||||
|
- **Conditions** — dynamic list, each row:
|
||||||
|
- Entity selector (Quotation / Device / Customer / User)
|
||||||
|
- Field selector (populated based on entity)
|
||||||
|
- Operator dropdown
|
||||||
|
- Value input
|
||||||
|
- **Actions** — dynamic list, each row:
|
||||||
|
- Action type dropdown
|
||||||
|
- Type-specific fields (to address, subject, body for email; notification title/body; etc.)
|
||||||
|
- Template variables hint: `{{quotation.quotation_number}}`, `{{customer.name}}`, etc.
|
||||||
|
- **Cooldown** — hours between firings on the same entity
|
||||||
|
- **Enabled** toggle
|
||||||
|
|
||||||
|
### Rule Run History
|
||||||
|
- Per-rule log: when it fired, on which entity, success/error
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Event Log UI
|
||||||
|
|
||||||
|
Accessible from `/event-log` route, linked from Dashboard.
|
||||||
|
|
||||||
|
- Filterable by: category, severity, entity type, date range
|
||||||
|
- Columns: time, category, severity badge, title, entity link
|
||||||
|
- Append-only (no deletion from UI)
|
||||||
|
- Retention: purge entries older than configurable days (e.g. 180 days) via the existing `purge_loop` pattern in `mqtt/database.py`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Pre-Built Rules (Seeded on First Run, All Disabled)
|
||||||
|
|
||||||
|
These are created on first startup — the admin enables and customizes them.
|
||||||
|
|
||||||
|
| Rule | Trigger | Condition | Action |
|
||||||
|
|---|---|---|---|
|
||||||
|
| Quotation follow-up | Schedule 24h | status=sent AND days_since_updated ≥ 7 AND no reply | Send follow-up email + notify |
|
||||||
|
| Device offline warning | Schedule 1h | no heartbeat for > 2h | Create notification + event log |
|
||||||
|
| New unknown email | email_received | customer_id IS NULL | Create notification |
|
||||||
|
| Subscription expiring soon | Schedule 24h | subscription.expiry_date within 7 days | Notify + send email |
|
||||||
|
| Device critical alert | mqtt_alert | state = CRITICAL | Notify + event log + optional MQTT restart |
|
||||||
|
| Quotation expired | Schedule 24h | status=sent AND days_since_updated ≥ 30 | Update status → expired + notify |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Implementation Phases
|
||||||
|
|
||||||
|
### Phase 1 — Foundation (DB + API)
|
||||||
|
- [ ] Add 4 new tables to `mqtt/database.py` schema + migrations
|
||||||
|
- [ ] Create `automation/database.py` with all DB helpers
|
||||||
|
- [ ] Create `automation/models.py` — Pydantic schemas for rules, conditions, actions, notifications, event_log
|
||||||
|
- [ ] Create `automation/router.py` — CRUD for rules, GET event_log, GET/PATCH notifications
|
||||||
|
- [ ] Wire router into `main.py`
|
||||||
|
|
||||||
|
### Phase 2 — Rules Engine + Scheduler
|
||||||
|
- [ ] Create `automation/engine.py` — condition evaluator, template renderer, action executor
|
||||||
|
- [ ] Create `automation/scheduler.py` — `automation_loop()` async task
|
||||||
|
- [ ] Hook `email_received` trigger into `crm/email_sync.sync_emails()`
|
||||||
|
- [ ] Hook `mqtt_alert` trigger into `mqtt/logger._handle_alerts()`
|
||||||
|
- [ ] Seed pre-built (disabled) rules on first startup
|
||||||
|
- [ ] Wire `automation_loop()` into `main.py` startup
|
||||||
|
|
||||||
|
### Phase 3 — Notification UI
|
||||||
|
- [ ] Bell icon with unread badge in `Header.jsx`
|
||||||
|
- [ ] Notifications dropdown panel component
|
||||||
|
- [ ] 30s polling hook in React
|
||||||
|
- [ ] Mark read / mark all read
|
||||||
|
|
||||||
|
### Phase 4 — Automation Rules UI
|
||||||
|
- [ ] `/automations` route and rule list page
|
||||||
|
- [ ] Rule editor form (conditions + actions dynamic builder)
|
||||||
|
- [ ] Enable/disable toggle
|
||||||
|
- [ ] Run history per rule
|
||||||
|
- [ ] Add "Automations" entry to Sidebar under Settings
|
||||||
|
|
||||||
|
### Phase 5 — Event Log UI
|
||||||
|
- [ ] `/event-log` route with filterable table
|
||||||
|
- [ ] Purge policy wired into existing `purge_loop`
|
||||||
|
- [ ] Dashboard widget showing recent high-severity events
|
||||||
|
|
||||||
|
### Phase 6 — Polish
|
||||||
|
- [ ] Toast notifications on new unread detection
|
||||||
|
- [ ] Template variable previewer in rule editor
|
||||||
|
- [ ] "Run now" button per rule (for testing without waiting for scheduler)
|
||||||
|
- [ ] Named email templates stored in DB (reusable across rules)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Key Design Decisions
|
||||||
|
|
||||||
|
| Decision | Choice | Reason |
|
||||||
|
|---|---|---|
|
||||||
|
| Storage | SQLite (same `mqtt_data.db`) | Consistent with existing pattern; no new infra |
|
||||||
|
| Scheduler | `asyncio` task in FastAPI startup | Same pattern as `email_sync_loop` and `purge_loop` already in `main.py` |
|
||||||
|
| Rule format | JSON columns in DB | Flexible, UI-editable, no schema migrations per new rule type |
|
||||||
|
| Template variables | `{{entity.field}}` string interpolation | Simple to implement, readable in UI |
|
||||||
|
| Cooldown dedup | `automation_run_log` per (rule_id, entity_id) | Prevents repeat firing on same quotation/device within cooldown window |
|
||||||
|
| Notification delivery | DB polling (30s) initially | The WS infra exists (`mqtt_manager._ws_subscribers`) — easy to upgrade later |
|
||||||
|
| Pre-built rules | Seeded as disabled | Non-intrusive — admin must consciously enable each one |
|
||||||
|
| `update_field` safety | Explicit allowlist of permitted fields | Prevents accidental data corruption from misconfigured rules |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Template Variables Reference
|
||||||
|
|
||||||
|
Available inside action `body`, `subject`, `title`, `link` fields:
|
||||||
|
|
||||||
|
| Variable | Source |
|
||||||
|
|---|---|
|
||||||
|
| `{{customer.name}}` | Firestore `crm_customers` |
|
||||||
|
| `{{customer.organization}}` | Firestore `crm_customers` |
|
||||||
|
| `{{quotation.quotation_number}}` | SQLite `crm_quotations` |
|
||||||
|
| `{{quotation.final_total}}` | SQLite `crm_quotations` |
|
||||||
|
| `{{quotation.status}}` | SQLite `crm_quotations` |
|
||||||
|
| `{{quotation.client_email}}` | SQLite `crm_quotations` |
|
||||||
|
| `{{device.serial}}` | Firestore `devices` |
|
||||||
|
| `{{device.label}}` | Firestore `devices` |
|
||||||
|
| `{{alert.subsystem}}` | MQTT alert payload |
|
||||||
|
| `{{alert.state}}` | MQTT alert payload |
|
||||||
|
| `{{user.email}}` | Firestore `users` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- `crm/email_sync.send_email()` is reused as-is for the `send_email` action type. The engine constructs the call parameters.
|
||||||
|
- `update_field` actions start with an allowlist of: `quotation.status`, `user.status`. Expand deliberately.
|
||||||
|
- For MQTT auto-restart, `mqtt_manager.publish_command(serial, "restart", {})` already works — the engine just calls it.
|
||||||
|
- Firestore is read-only from the automation engine (for customer/device lookups). All writes go to SQLite, consistent with the existing architecture.
|
||||||
|
- The `has_reply` condition on quotations is computed by checking whether any `crm_comms_log` entry exists with `direction='inbound'` and `customer_id` matching the quotation's customer, dated after the quotation's `updated_at`.
|
||||||
404
CRM_STATUS_SYSTEM_PLAN.md
Normal file
@@ -0,0 +1,404 @@
|
|||||||
|
# CRM Customer Status System — Implementation Plan
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
This project is a Vue/React + FastAPI + Firestore admin console located at `C:\development\bellsystems-cp`.
|
||||||
|
|
||||||
|
The frontend lives in `frontend/src/` and the backend in `backend/`.
|
||||||
|
The CRM module is at `frontend/src/crm/` and `backend/crm/`.
|
||||||
|
|
||||||
|
Currently, customers have two flat boolean flags on their Firestore document:
|
||||||
|
- `negotiating: bool`
|
||||||
|
- `has_problem: bool`
|
||||||
|
|
||||||
|
These need to be replaced with a richer, structured system as described below.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Target Data Model
|
||||||
|
|
||||||
|
### 1A. On the Customer Document (`customers/{id}`)
|
||||||
|
|
||||||
|
Remove `negotiating` and `has_problem`. Add the following:
|
||||||
|
|
||||||
|
```
|
||||||
|
relationship_status: string
|
||||||
|
— one of: "lead" | "prospect" | "active" | "inactive" | "churned"
|
||||||
|
— default: "lead"
|
||||||
|
|
||||||
|
technical_issues: array of {
|
||||||
|
active: bool,
|
||||||
|
opened_date: Firestore Timestamp,
|
||||||
|
resolved_date: Firestore Timestamp | null,
|
||||||
|
note: string,
|
||||||
|
opened_by: string, ← display name or user ID of staff member
|
||||||
|
resolved_by: string | null
|
||||||
|
}
|
||||||
|
|
||||||
|
install_support: array of {
|
||||||
|
active: bool,
|
||||||
|
opened_date: Firestore Timestamp,
|
||||||
|
resolved_date: Firestore Timestamp | null,
|
||||||
|
note: string,
|
||||||
|
opened_by: string,
|
||||||
|
resolved_by: string | null
|
||||||
|
}
|
||||||
|
|
||||||
|
transaction_history: array of {
|
||||||
|
date: Firestore Timestamp,
|
||||||
|
flow: string, ← "invoice" | "payment" | "refund" | "credit"
|
||||||
|
payment_type: string | null, ← "cash" | "bank_transfer" | "card" | "paypal" — null for invoices
|
||||||
|
category: string, ← "full_payment" | "advance" | "installment"
|
||||||
|
amount: number,
|
||||||
|
currency: string, ← default "EUR"
|
||||||
|
invoice_ref: string | null,
|
||||||
|
order_ref: string | null, ← references an order document ID, nullable
|
||||||
|
recorded_by: string,
|
||||||
|
note: string
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 1B. Orders Subcollection (`customers/{id}/orders/{order_id}`)
|
||||||
|
|
||||||
|
Orders live **exclusively** as a subcollection under each customer. There is no top-level `orders`
|
||||||
|
collection. The existing top-level `orders` collection in Firestore and its corresponding backend
|
||||||
|
routes should be **removed entirely** and replaced with subcollection-based routes under
|
||||||
|
`/crm/customers/{customer_id}/orders/`.
|
||||||
|
|
||||||
|
If cross-customer order querying is ever needed in the future, use Firestore's native
|
||||||
|
`collectionGroup("orders")` query — no top-level mirror collection is required.
|
||||||
|
|
||||||
|
Each order document carries the following fields:
|
||||||
|
|
||||||
|
```
|
||||||
|
order_number: string ← e.g. "ORD-2026-041" (already exists — keep)
|
||||||
|
title: string ← NEW: human-readable name e.g. "3x Wall Mount Units - Athens Office"
|
||||||
|
created_by: string ← NEW: staff user ID or display name
|
||||||
|
|
||||||
|
status: string ← REPLACE existing OrderStatus enum with new values:
|
||||||
|
— "negotiating" | "awaiting_quotation" | "awaiting_customer_confirmation"
|
||||||
|
| "awaiting_fulfilment" | "awaiting_payment" | "manufacturing"
|
||||||
|
| "shipped" | "installed" | "declined" | "complete"
|
||||||
|
|
||||||
|
status_updated_date: Firestore Timestamp ← NEW
|
||||||
|
status_updated_by: string ← NEW
|
||||||
|
|
||||||
|
payment_status: object { ← NEW — replaces the flat PaymentStatus enum
|
||||||
|
required_amount: number,
|
||||||
|
received_amount: number, ← computed from transaction_history where order_ref matches
|
||||||
|
balance_due: number, ← computed: required_amount - received_amount
|
||||||
|
advance_required: bool,
|
||||||
|
advance_amount: number | null,
|
||||||
|
payment_complete: bool
|
||||||
|
}
|
||||||
|
|
||||||
|
timeline: array of { ← NEW — order event log
|
||||||
|
date: Firestore Timestamp,
|
||||||
|
type: string, ← "quote_request" | "quote_sent" | "quote_accepted" | "quote_declined"
|
||||||
|
| "mfg_started" | "mfg_complete" | "order_shipped" | "installed"
|
||||||
|
| "payment_received" | "invoice_sent" | "note"
|
||||||
|
note: string,
|
||||||
|
updated_by: string
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Backend Changes
|
||||||
|
|
||||||
|
### 2A. `backend/crm/models.py`
|
||||||
|
|
||||||
|
- **Remove** `negotiating: bool` and `has_problem: bool` from `CustomerCreate` and `CustomerUpdate`.
|
||||||
|
- **Add** `relationship_status: Optional[str] = "lead"` to `CustomerCreate` and `CustomerUpdate`.
|
||||||
|
- **Add** `technical_issues: List[dict] = []` to `CustomerCreate` and `CustomerUpdate`.
|
||||||
|
- **Add** `install_support: List[dict] = []` to `CustomerCreate` and `CustomerUpdate`.
|
||||||
|
- **Add** `transaction_history: List[dict] = []` to `CustomerCreate` and `CustomerUpdate`.
|
||||||
|
- **Add** proper Pydantic models for each of the above array item shapes:
|
||||||
|
- `TechnicalIssue` model
|
||||||
|
- `InstallSupportEntry` model
|
||||||
|
- `TransactionEntry` model
|
||||||
|
- **Update** `OrderStatus` enum with the new values:
|
||||||
|
`negotiating`, `awaiting_quotation`, `awaiting_customer_confirmation`,
|
||||||
|
`awaiting_fulfilment`, `awaiting_payment`, `manufacturing`,
|
||||||
|
`shipped`, `installed`, `declined`, `complete`
|
||||||
|
- **Replace** the flat `PaymentStatus` enum on `OrderCreate` / `OrderUpdate` with a new `OrderPaymentStatus` Pydantic model matching the structure above.
|
||||||
|
- **Add** `title: Optional[str]`, `created_by: Optional[str]`, `status_updated_date: Optional[str]`,
|
||||||
|
`status_updated_by: Optional[str]`, and `timeline: List[dict] = []` to `OrderCreate` and `OrderUpdate`.
|
||||||
|
|
||||||
|
### 2B. `backend/crm/customers_router.py`
|
||||||
|
|
||||||
|
- Update any route that reads/writes `negotiating` or `has_problem` to use the new fields.
|
||||||
|
- Add new dedicated endpoints:
|
||||||
|
|
||||||
|
```
|
||||||
|
POST /crm/customers/{id}/technical-issues
|
||||||
|
— body: { note: str, opened_by: str }
|
||||||
|
— appends a new active issue to the array
|
||||||
|
|
||||||
|
PATCH /crm/customers/{id}/technical-issues/{index}/resolve
|
||||||
|
— body: { resolved_by: str }
|
||||||
|
— sets active=false and resolved_date=now on the item at that index
|
||||||
|
|
||||||
|
POST /crm/customers/{id}/install-support
|
||||||
|
— same pattern as technical-issues above
|
||||||
|
|
||||||
|
PATCH /crm/customers/{id}/install-support/{index}/resolve
|
||||||
|
— same as technical-issues resolve
|
||||||
|
|
||||||
|
POST /crm/customers/{id}/transactions
|
||||||
|
— body: TransactionEntry (see model above)
|
||||||
|
— appends to transaction_history
|
||||||
|
|
||||||
|
PATCH /crm/customers/{id}/relationship-status
|
||||||
|
— body: { status: str }
|
||||||
|
— updates relationship_status field
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2C. `backend/crm/orders_router.py`
|
||||||
|
|
||||||
|
- **Remove** all top-level `/crm/orders/` routes entirely.
|
||||||
|
- Re-implement all order CRUD under `/crm/customers/{customer_id}/orders/`:
|
||||||
|
|
||||||
|
```
|
||||||
|
GET /crm/customers/{customer_id}/orders/
|
||||||
|
POST /crm/customers/{customer_id}/orders/
|
||||||
|
GET /crm/customers/{customer_id}/orders/{order_id}
|
||||||
|
PATCH /crm/customers/{customer_id}/orders/{order_id}
|
||||||
|
DELETE /crm/customers/{customer_id}/orders/{order_id}
|
||||||
|
```
|
||||||
|
|
||||||
|
- Add endpoint to append a timeline event:
|
||||||
|
|
||||||
|
```
|
||||||
|
POST /crm/customers/{customer_id}/orders/{order_id}/timeline
|
||||||
|
— body: { type: str, note: str, updated_by: str }
|
||||||
|
— appends to the timeline array and updates status_updated_date + status_updated_by
|
||||||
|
```
|
||||||
|
|
||||||
|
- Add endpoint to update payment status:
|
||||||
|
|
||||||
|
```
|
||||||
|
PATCH /crm/customers/{customer_id}/orders/{order_id}/payment-status
|
||||||
|
— body: OrderPaymentStatus fields (partial update allowed)
|
||||||
|
```
|
||||||
|
|
||||||
|
- Add a dedicated "Init Negotiations" endpoint:
|
||||||
|
|
||||||
|
```
|
||||||
|
POST /crm/customers/{customer_id}/orders/init-negotiations
|
||||||
|
— body: { title: str, note: str, date: datetime, created_by: str }
|
||||||
|
— creates a new order with status="negotiating", auto-fills all other fields
|
||||||
|
— simultaneously updates the customer's relationship_status to "active"
|
||||||
|
(only if currently "lead" or "prospect" — do not downgrade an already "active" customer)
|
||||||
|
— returns the newly created order document
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Frontend Changes
|
||||||
|
|
||||||
|
### 3A. `frontend/src/crm/customers/CustomerList.jsx`
|
||||||
|
|
||||||
|
- When Notes: Quick filter is set, replace the `negotiating` and `has_problem` boolean badge display in the Status column with:
|
||||||
|
- A **relationship status chip** (color-coded pill: lead=grey, prospect=blue, active=green, inactive=amber, churned=soft red)
|
||||||
|
- A small **red dot / warning icon** if `technical_issues.some(i => i.active)` is true, under a new "Support" column. Add this column to the list of arrangeable and toggleable columns.
|
||||||
|
- A small **amber dot / support icon** if `install_support.some(i => i.active)` is true, under the same "Support" column.
|
||||||
|
- These are derived from the arrays — do not store a separate boolean on the document.
|
||||||
|
- When Notes: Expanded filter is set, replace the `negotiating` and `has_problem` verbose displays with the active order status (if any) in this format:
|
||||||
|
`"<Status Label> — <Date> — <Note>"` e.g. `"Negotiating — 24.03.26 — Customer requested a more affordable quotation"`
|
||||||
|
|
||||||
|
### 3B. `frontend/src/crm/customers/CustomerDetail.jsx`
|
||||||
|
|
||||||
|
The customer detail page currently has a tab structure: Overview, Orders, Quotations, Communication, Files & Media, Devices.
|
||||||
|
|
||||||
|
Make the following changes:
|
||||||
|
|
||||||
|
#### Whole page
|
||||||
|
- On the top of the page where we display the name, organization and full address, change it to:
|
||||||
|
Line 1: `Full Title + Name + Surname`
|
||||||
|
Line 2: `Organization · City` (city only, not full address)
|
||||||
|
- Remove the horizontal separation line after the title and before the tabs.
|
||||||
|
- On the top right side, there is an Edit Customer button. To its left, add **3 new buttons** in this
|
||||||
|
order (left → right): **Init Negotiations**, **Record Issue/Support**, **Record Payment**, then
|
||||||
|
the existing Edit button. All 4 buttons are the same size. Add solid single-color icons to each.
|
||||||
|
|
||||||
|
**"Init Negotiations" button** (blue/indigo accent):
|
||||||
|
- Opens a mini modal.
|
||||||
|
- Fields: Date (defaults to NOW), Title (text input, required), Note (textarea, optional).
|
||||||
|
- Auto-filled server-side: `status = "negotiating"`, `created_by` = current user,
|
||||||
|
`status_updated_date` = now, `status_updated_by` = current user,
|
||||||
|
`payment_status` defaults to zeroed object.
|
||||||
|
- On confirm: calls `POST /crm/customers/{id}/orders/init-negotiations`.
|
||||||
|
- After success: refreshes customer data and orders list. The customer's `relationship_status`
|
||||||
|
is set to `"active"` server-side — no separate frontend call needed.
|
||||||
|
- This is a fast-entry shortcut only. All subsequent edits to this order happen via the Orders tab.
|
||||||
|
|
||||||
|
**"Record Issue/Support" button** (amber/orange accent):
|
||||||
|
- Opens a mini modal.
|
||||||
|
- At the top: a **2-button toggle selector** (not a dropdown) to choose: `Technical Issue` | `Install Support`.
|
||||||
|
- Fields: Date (defaults to NOW), Note (textarea, required).
|
||||||
|
- On confirm: calls `POST /crm/customers/{id}/technical-issues` or
|
||||||
|
`POST /crm/customers/{id}/install-support` depending on selection.
|
||||||
|
|
||||||
|
**"Record Payment" button** (green accent):
|
||||||
|
- Opens a mini modal.
|
||||||
|
- Fields: Date (defaults to NOW), Payment Type (cash | bank transfer | card | paypal),
|
||||||
|
Category (full payment | advance | installment), Amount (number), Currency (defaults to EUR),
|
||||||
|
Invoice Ref (searchable over the customer's invoices, optional),
|
||||||
|
Order Ref (searchable/selectable from the customer's orders, optional),
|
||||||
|
Note (textarea, optional).
|
||||||
|
- On confirm: calls `POST /crm/customers/{id}/transactions`.
|
||||||
|
|
||||||
|
#### Overview Tab
|
||||||
|
- The main hero section gets a complete overhaul — start fresh:
|
||||||
|
- **Row 1 — Relationship Status selector**: The 5 statuses (`lead | prospect | active | inactive | churned`) as styled pill/tab buttons in a row. Current status is highlighted with a glow effect. Color-code using global CSS variables (add to `index.css` if not already present). Clicking a status immediately calls `PATCH /crm/customers/{id}/relationship-status`.
|
||||||
|
- **Row 2 — Customer info**: All fields except Name and Organization (shown in page header). Include language, religion, tags, etc.
|
||||||
|
- **Row 3 — Contacts**: All contact entries (phone, email, WhatsApp, etc.).
|
||||||
|
- **Row 4 — Notes**: Responsive column grid. 1 column below 1100px, 2 columns 1100–2000px, 3 columns above 2000px. Masonry/wrap layout with no gaps between note cards.
|
||||||
|
- Move the Latest Orders section to just below the hero section, before Latest Communications.
|
||||||
|
Hide this section entirely if no orders exist for this customer.
|
||||||
|
- For all other sections (Latest Communications, Latest Quotations, Devices): hide each section
|
||||||
|
entirely if it has no data. Show dynamically when data exists.
|
||||||
|
|
||||||
|
#### New "Support" Tab (add to TABS array, after Overview)
|
||||||
|
Two full-width section cards:
|
||||||
|
|
||||||
|
**Technical Issues Card**
|
||||||
|
- Header shows active count badge (e.g. "2 active")
|
||||||
|
- All issues listed, newest first (active and resolved)
|
||||||
|
- Each row: colored status dot, opened date, note, opened_by — "Resolve" button if active
|
||||||
|
- If more than 5 items: list is scrollable (fixed max-height), does not expand the page
|
||||||
|
- "Report New Issue" button → small inline form with note field + submit
|
||||||
|
|
||||||
|
**Install Support Card**
|
||||||
|
- Identical structure to Technical Issues card
|
||||||
|
- Same scrollable behavior if more than 5 items
|
||||||
|
|
||||||
|
#### New "Financials" Tab (add to TABS array, after Support)
|
||||||
|
Two sections:
|
||||||
|
|
||||||
|
**Active Order Payment Status** (shown only if an active order exists)
|
||||||
|
- required_amount, received_amount, balance_due
|
||||||
|
- Advance required indicator + advance amount if applicable
|
||||||
|
- Payment complete indicator
|
||||||
|
|
||||||
|
**Transaction History**
|
||||||
|
- Ledger table: Date | Flow | Amount | Currency | Method | Category | Order Ref | Invoice Ref | Note | Recorded By | Actions
|
||||||
|
- "Add Transaction" button → modal with all TransactionEntry fields
|
||||||
|
- Totals row: Total Invoiced vs Total Paid vs Outstanding Balance
|
||||||
|
- Each row: right-aligned **Actions** button (consistent with other tables in the project)
|
||||||
|
with options: **Edit** (opens edit form) and **Delete** (requires confirmation dialog)
|
||||||
|
|
||||||
|
#### Orders Tab (existing — update in place)
|
||||||
|
- Each order card/row shows:
|
||||||
|
- `title` as primary heading
|
||||||
|
- `status` with human-readable label and color coding (see Section 4)
|
||||||
|
- `payment_status` summary: required / received / balance due
|
||||||
|
- **"View Timeline"** toggle: expands a vertical event log below the order card
|
||||||
|
- **"Add Timeline Event"** button: small inline form with type dropdown + note field
|
||||||
|
- Update all API calls to use `/crm/customers/{customer_id}/orders/` routes.
|
||||||
|
|
||||||
|
### 3C. `frontend/src/crm/customers/CustomerForm.jsx`
|
||||||
|
|
||||||
|
- Remove `negotiating` and `has_problem` fields.
|
||||||
|
- Add `relationship_status` dropdown (default: `"lead"`).
|
||||||
|
- No issue/transaction forms needed here — managed from the detail page.
|
||||||
|
|
||||||
|
### 3D. `frontend/src/crm/orders/OrderForm.jsx` and `OrderDetail.jsx`
|
||||||
|
|
||||||
|
- Update status dropdown with new values and labels:
|
||||||
|
- `negotiating` → "Negotiating"
|
||||||
|
- `awaiting_quotation` → "Awaiting Quotation"
|
||||||
|
- `awaiting_customer_confirmation` → "Awaiting Customer Confirmation"
|
||||||
|
- `awaiting_fulfilment` → "Awaiting Fulfilment"
|
||||||
|
- `awaiting_payment` → "Awaiting Payment"
|
||||||
|
- `manufacturing` → "Manufacturing"
|
||||||
|
- `shipped` → "Shipped"
|
||||||
|
- `installed` → "Installed"
|
||||||
|
- `declined` → "Declined"
|
||||||
|
- `complete` → "Complete"
|
||||||
|
- Add `title` input field (required).
|
||||||
|
- Replace flat `payment_status` enum with the new `payment_status` object fields.
|
||||||
|
- Add Timeline section to `OrderDetail.jsx`: vertical event log + add-entry inline form.
|
||||||
|
- Update all API calls to use `/crm/customers/{customer_id}/orders/` routes.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Status Color Coding Reference
|
||||||
|
|
||||||
|
Define all as CSS variables in `index.css` and use consistently across all views:
|
||||||
|
|
||||||
|
### Relationship Status
|
||||||
|
| Status | Color |
|
||||||
|
|---|---|
|
||||||
|
| lead | grey / muted |
|
||||||
|
| prospect | blue |
|
||||||
|
| active | green |
|
||||||
|
| inactive | amber |
|
||||||
|
| churned | dark or soft red |
|
||||||
|
|
||||||
|
### Order Status
|
||||||
|
| Status | Color |
|
||||||
|
|---|---|
|
||||||
|
| negotiating | blue |
|
||||||
|
| awaiting_quotation | purple |
|
||||||
|
| awaiting_customer_confirmation | indigo |
|
||||||
|
| awaiting_fulfilment | amber |
|
||||||
|
| awaiting_payment | orange |
|
||||||
|
| manufacturing | cyan |
|
||||||
|
| shipped | teal |
|
||||||
|
| installed | green |
|
||||||
|
| declined | red |
|
||||||
|
| complete | muted/grey |
|
||||||
|
|
||||||
|
### Issue / Support Flags
|
||||||
|
| State | Color |
|
||||||
|
|---|---|
|
||||||
|
| active issue | red |
|
||||||
|
| active support | amber |
|
||||||
|
| resolved | muted/grey |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Migration Notes
|
||||||
|
|
||||||
|
- The old `negotiating` and `has_problem` fields will remain in Firestore until the migration script is run. The backend should **read both old and new fields** during the transition period, preferring the new structure if present.
|
||||||
|
- A one-time migration script (`backend/migrate_customer_flags.py`) should:
|
||||||
|
1. Read all customer documents
|
||||||
|
2. If `negotiating: true` → create an order in the customer's `orders` subcollection with `status = "negotiating"` and set `relationship_status = "active"` on the customer
|
||||||
|
3. If `has_problem: true` → append one entry to `technical_issues` with `active: true`, `opened_date: customer.updated_at`, `note: "Migrated from legacy has_problem flag"`, `opened_by: "system"`
|
||||||
|
4. Remove `negotiating` and `has_problem` from the customer document
|
||||||
|
- Do **not** run the migration script until all frontend and backend changes are deployed and tested.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. File Summary — What to Touch
|
||||||
|
|
||||||
|
```
|
||||||
|
backend/crm/models.py ← model updates (primary changes)
|
||||||
|
backend/crm/customers_router.py ← new endpoints + field updates
|
||||||
|
backend/crm/orders_router.py ← remove top-level routes, re-implement as subcollection,
|
||||||
|
add timeline + payment-status + init-negotiations endpoints
|
||||||
|
backend/migrate_customer_flags.py ← NEW one-time migration script
|
||||||
|
|
||||||
|
frontend/src/index.css ← add CSS variables for all new status colors
|
||||||
|
frontend/src/crm/customers/CustomerList.jsx ← relationship status chip + support flag dots column
|
||||||
|
frontend/src/crm/customers/CustomerDetail.jsx ← page header, 3 new quick-entry buttons + modals,
|
||||||
|
Overview tab overhaul, new Support tab,
|
||||||
|
new Financials tab, Orders tab updates
|
||||||
|
frontend/src/crm/customers/CustomerForm.jsx ← remove old flags, add relationship_status
|
||||||
|
frontend/src/crm/orders/OrderForm.jsx ← new status values, title field, payment_status,
|
||||||
|
updated API route paths
|
||||||
|
frontend/src/crm/orders/OrderDetail.jsx ← timeline section, updated status/payment,
|
||||||
|
updated API route paths
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Do NOT Change (out of scope)
|
||||||
|
|
||||||
|
- Quotations system — leave as-is
|
||||||
|
- Communications / inbox — leave as-is
|
||||||
|
- Files & Media tab — leave as-is
|
||||||
|
- Devices tab — leave as-is
|
||||||
|
- Any other module outside `crm/`
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
You are working on the bellsystems-cp project at ~/bellsystems-cp/.
|
|
||||||
Read BellSystems_AdminPanel_Strategy.md for the full project strategy.
|
|
||||||
|
|
||||||
We are now building Phase X — [Phase Name].
|
|
||||||
|
|
||||||
Review the existing codebase first, then implement the following:
|
|
||||||
[list of tasks for that phase]
|
|
||||||
|
|
||||||
Ask me before making any major architectural decisions.
|
|
||||||
Commit when done.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Esperinos-Adamn-1k: 1,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
Doxology_Traditional: 1,2,1,2,1,3,0,0,1,2,1,2,1,4,0,0,1,2,1,2,1,2,1,2,1,2,1,3,1,4,0,0
|
|
||||||
Doxology_Alternative: 1,0,2,3,0,4+5,0,1,0,2,3,0,4+5,0,1,0,2,3,0,4+5,0,1,2,1,2,3,0,4+5,0
|
|
||||||
Doxology_Festive: 2,3,4+1,3,2,3,5+1,3,2,3,6+1,3,2,3,5+1,3
|
|
||||||
Vesper_Traditional: 1,2,3,0,1,2,3,0,1,2,1,2,1,2,3,0
|
|
||||||
Vesper_Alternative: 1,2,0,0,1,2,0,0,1,3,0,0,0,0,0,0,1,2,0,0,1,2,0,0,1,4,0,0,0,0,0,0,1,2,0,0,1,2,0,0,1,2,0,0,1,2,0,0,1,2,0,0,1,3,0,0,1,4,0,0,0,0,0,0
|
|
||||||
Catehetical: 1,2,3,4,5
|
|
||||||
Orthros_Traditional: 1,0,2,0,3,4,0,5,0,6,0,7,8,0
|
|
||||||
Orthros_Alternative: 1,0,2,1,0,2,0,1,0,1,2,1,0,3,0
|
|
||||||
Mournfull_Toll: 1,0,0,0,0,0,0,1,0,0,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,4,0,0,0,0,0,0,4,0,0,0,0,0,0
|
|
||||||
Mournfull_Toll_Alternative: 1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,3,0,0,0,0,0,0,3,3,0,0,0,0,0,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,4,0,0,0,0,0,0,4,4,0,0,0,0,0,0
|
|
||||||
Mournfull_Toll_Meg_Par: 1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,3,3,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,4,0,0,0,0,0,0,4,0,0,0,0,0,0,4,4,0,0,0,0,0,0
|
|
||||||
Sematron: 1,1,1,2,1,1,1,4,1,1,1,2,1,3,1,4
|
|
||||||
Sematron_Alternative: 1,1,1,2,1,1,1,4,1,2,1,3,1,1,1,4
|
|
||||||
Athonite_1_2_Voices: 1,2,1,1,2,1,1,2,1,1,2,1,2
|
|
||||||
Athonite_3_Voices: 2,1,0,0,2,1,0,0,2,1,2,1,2,1,0,1,2,1,0,1,2,1,0,1,2,1,2,1,2,1,0,1,2,1,0,1,2,1,0,1,2,1,2,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,2,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,2,1,2,1,3
|
|
||||||
Athonite_3_4_Voices: 2,1,0,0,2,1,0,0,2,1,2,1,2,1,0,1,2,1,0,1,2,1,0,1,2,1,2,1,2,1,0,1,2,1,0,1,2,1,0,1,2,1,2,1,2,1,3,1,2,1,0,3+1,2,1,0,3+1,2,1,0,3+1,2,1,2,3+1,2,1,4,3+1,2,1,0,3+1,2,1,0,3+1,2,1,2,3+1,2,1,4+1,2,1,3+1,2,1,4+2,2,1,3+2,2,1,4+1,2,1,3+1,2,1,4+2,2,1,3+2,2,1,4+1
|
|
||||||
Athonite_4_8_Voices: 2,1,0,0,2,1,0,0,2,1,2,1,2,1,0,1,2,1,0,1,2,1,0,1,2,1,2,1,2,1,0,1,2,1,0,1,2,1,0,1,2,1,2,1,2,1,3,1,2,1,0,3+1,2,1,0,3+1,2,1,0,3+1,2,1,2,3+1,2,1,4,3+1,2,1,0,3+1,2,1,0,3+1,2,1,2,3+1,2,1,4+1,2,1,5+1,2,1,6+2,2,1,8+1,2,1,4+2,2,1,7+1,2,1,5+2,2,1,6+1,2,1,8+2,2,1,4+1,2,1,7+2,2,1,5+1,2,1,6+2,2,1,8+1,2,1,4+2,2,1,7+1,2,1,0,3+1,2,1,0,3+1,2,1,2,3+1,2,1,0,0,0
|
|
||||||
OneByOne_2_3_Voices: 1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2
|
|
||||||
OneByOne_4_8_Voices: 1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,2,3,4,3,2,5+1,2,3,4,3,2,6+1,2,3,4,3,2,7+1,2,3,4,3,2,8+1,2,3,4,3,2,7+1,2,3,4,3,2,6+1,2,3,4,3,2,7+1,2,3,4,3,2,8+1,2,3,4,3,2,7+1,2,3,4,3,2,6+1,2,3,4,3,2,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,3,1,2,1,2,1,2,1,2,1,2,1,2,1,0
|
|
||||||
Festive_1Voice: 1,1,1,0,1,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0,1,0
|
|
||||||
Festive_4Voices: 1,2,3,4+1,2,1,3,4+1
|
|
||||||
Festive_5Voices: 1,2,3,2,1,2,1,2,4,2,1,2,1,2,3,2,1,2,1,2,5,2,1,2
|
|
||||||
Festive_5Voice_Alternative: 3,2,4,1,3,3,2,4,1,5,3,2,4,1,3,3,2,4,1,5+1,3,2,4,1,3,3,2,4,1,5+1,3,2,4,1,3+1,3,2,4,1,5+1,3,2,4,1,3+1,3,2,4,1,5+1,3,2,4,1,3,3,2,4,1,5,3,2,4,1,3,3,2,4,1,5
|
|
||||||
Festive_6Voices: 1,2,3,2,1,2,1,2,4,2,1,2,1,2,3,2,1,2,1,2,5,2,1,2,1,2,4+1,2,1,2,5+1,2,1,2,3+1,2,1,2,6+1,2,1,2,4+1,2,1,2,5+1,2,1,2,3+1,2,1,2,6+1,2,1,2,4+1,2,1,2,5+1,2,1,2,3+1,2,1,2,6+1,2,1,2
|
|
||||||
Festive_8Voices: 1,2,3,4,5,6,7,8
|
|
||||||
Ormilia: 2,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,2,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,2,1,0,1,2,1,0,1,2,1,0,1,0,1,0,1,2,1,2,1,2,1,2,1,0,1,0,1,0,1,0,1,2,4+1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,2,3+1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,2,4+1,0,1,2,3+1,0,1,2,4+1,0,1,2,3+1,0,1,2,5+1,2,1,2,6+1,2,1,2,5+1,2,1,2,6+1,2,7+1,2,8+1,2,4+1,2,7+1,2,8+1,2,4+1,2,7+1,2,8+1,2,3+1,2,1,0
|
|
||||||
@@ -1,215 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Bell Melody Converter
|
|
||||||
Converts human-readable bell notation to binary .bsm files for ESP32
|
|
||||||
|
|
||||||
Format: MELODY_NAME: 1,2,3+4,0,5+6+7
|
|
||||||
Output: MELODY_NAME.bsm (binary file, uint16_t big-endian)
|
|
||||||
"""
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List, Tuple
|
|
||||||
|
|
||||||
|
|
||||||
def parse_bell_notation(notation: str) -> int:
    """Translate human-readable bell notation into a 16-bit bell bitmask.

    Examples:
        "2+8"   -> bells 2 and 8 -> 0x0082
        "4"     -> bell 4        -> 0x0008
        "1+2+3" -> bells 1,2,3   -> 0x0007
        "0"     -> silence       -> 0x0000

    Bell #N maps to bit (N-1), i.e. value = 1 << (N-1). Invalid or
    out-of-range tokens print a warning and are skipped.
    """
    text = notation.strip()

    # Empty string or the literal "0" both mean silence.
    if not text or text == '0':
        return 0

    mask = 0
    # "+" joins bells that ring simultaneously in one step.
    for token in text.split('+'):
        try:
            bell = int(token.strip())
        except ValueError:
            print(f"Warning: Invalid bell number '{token}', skipping")
            continue

        if bell == 0:
            # A zero inside a chord contributes nothing.
            continue
        if bell < 1 or bell > 16:
            print(f"Warning: Bell number {bell} out of range (1-16), skipping")
            continue

        mask |= 1 << (bell - 1)

    return mask
|
|
||||||
|
|
||||||
|
|
||||||
def parse_melody_line(line: str) -> Tuple[str, List[int]]:
    """Parse one melody line of the form ``MELODY_NAME: step,step,step``.

    Blank lines and ``#`` comments are silently ignored; malformed lines
    print a warning. Each comma-separated step is converted to a uint16_t
    bell bitmask by ``parse_bell_notation``.

    Returns:
        (melody_name, values) on success, or (None, []) for skipped lines.
    """
    line = line.strip()

    # Skip blank lines and comments.
    if not line or line.startswith('#'):
        return None, []

    if ':' not in line:
        print(f"Warning: Invalid format (missing ':'): {line}")
        return None, []

    name_part, steps_part = line.split(':', 1)
    melody_name = name_part.strip()

    if not melody_name:
        print(f"Warning: Empty melody name in line: {line}")
        return None, []

    # Fixed: the original iterated with enumerate() but never used the
    # index; a plain comprehension expresses the 1:1 mapping directly.
    values = [parse_bell_notation(step) for step in steps_part.strip().split(',')]

    return melody_name, values
|
|
||||||
|
|
||||||
|
|
||||||
def write_binary_melody(filepath: str, values: List[int]):
    """Serialize melody steps to a binary file as big-endian uint16_t words.

    Args:
        filepath: Output file path (typically ``<MELODY_NAME>.bsm``).
        values: Step bitmasks; any value above 0xFFFF is truncated to
            16 bits with a printed warning.
    """
    with open(filepath, 'wb') as out:
        words = []
        for step in values:
            if step > 0xFFFF:
                print(f"Warning: Value {step} exceeds uint16_t range, truncating")
                step &= 0xFFFF
            # MSB first (big-endian), 2 bytes per step.
            words.append(step.to_bytes(2, byteorder='big'))
        out.write(b''.join(words))
|
|
||||||
|
|
||||||
|
|
||||||
def convert_melodies_file(input_path: str, output_dir: str = '.'):
    """Convert a multi-melody text file into individual .bsm binary files.

    Args:
        input_path: Path to the input text file (one melody per line).
        output_dir: Directory that receives the generated .bsm files
            (created if missing).

    Returns:
        True on success, False if the input file is missing or an error
        occurs while converting.
    """
    target_dir = Path(output_dir)
    target_dir.mkdir(parents=True, exist_ok=True)

    created = 0
    step_total = 0

    try:
        with open(input_path, 'r', encoding='utf-8') as src:
            raw_lines = src.readlines()

        print(f"Reading from: {input_path}")
        print(f"Output directory: {target_dir.absolute()}\n")

        for raw in raw_lines:
            name, values = parse_melody_line(raw)
            if not (name and values):
                continue

            destination = target_dir / f"{name}.bsm"
            write_binary_melody(str(destination), values)

            # Collect the distinct bells referenced by any step (1-indexed).
            used_bells = {
                bit + 1
                for value in values
                for bit in range(16)
                if value & (1 << bit)
            }
            bells_str = ','.join(map(str, sorted(used_bells))) if used_bells else 'none'

            print(f"✓ {name}.bsm")
            print(f" Steps: {len(values)}")
            print(f" Size: {len(values) * 2} bytes")
            print(f" Bells used: {bells_str}")
            print()

            created += 1
            step_total += len(values)

        print(f"{'='*50}")
        print(f"✓ Successfully created {created} melody files")
        print(f" Total steps: {step_total}")
        print(f" Total size: {step_total * 2} bytes")

        return True

    except FileNotFoundError:
        print(f"Error: Input file '{input_path}' not found")
        return False
    except Exception as e:
        print(f"Error: {e}")
        import traceback
        traceback.print_exc()
        return False
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """CLI entry point: convert all_melodies.txt into .bsm binary files."""
    print("=== Bell Melody Converter ===")
    print("Creates binary .bsm files for ESP32\n")

    input_file = "all_melodies.txt"
    output_dir = "."

    if not Path(input_file).exists():
        # Input file missing — explain the expected format, then bail out.
        for msg in (
            f"Error: '{input_file}' not found in current directory!",
            "\nPlease create 'all_melodies.txt' with format:",
            " MELODY_NAME: step,step,step,...",
            "\nStep notation:",
            " 0 - Silence",
            " 4 - Bell #4 only",
            " 2+8 - Bells #2 and #8 together",
            " 1+2+3 - Bells #1, #2, and #3 together",
            "\nExample:",
            " JINGLE_BELLS: 4,4,4,0,4,4,4,0,4,8,1,2,4",
            " ALARM: 2+8,0,2+8,0,2+8,0",
            " HAPPY_BIRTHDAY: 1,1,2,1,4,3,0,1,1,2,1,8,4",
        ):
            print(msg)
        sys.exit(1)

    success = convert_melodies_file(input_file, output_dir)
    sys.exit(0 if success else 1)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Run the converter only when executed as a script, not on import.
    main()
|
|
||||||
@@ -1,300 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Bell Melody to C Header Converter
|
|
||||||
Converts human-readable bell notation to C header file with PROGMEM arrays
|
|
||||||
|
|
||||||
Input Format: MELODY_NAME: 1,2,3+4,0,5+6+7
|
|
||||||
Output: melodies.h (C header with const uint16_t PROGMEM arrays)
|
|
||||||
"""
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import List, Tuple
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
|
|
||||||
def parse_bell_notation(notation: str) -> int:
    """Convert a bell-notation token into its uint16_t bit-flag value.

    Bell #N occupies bit (N-1), so "2+8" -> 0x0082, "4" -> 0x0008,
    "1+2+3" -> 0x0007 and "0" (or empty) -> 0x0000. Non-numeric or
    out-of-range (not 1-16) tokens warn and contribute nothing.
    """
    cleaned = notation.strip()
    if cleaned in ('', '0'):
        # Silence step.
        return 0

    flags = 0
    for part in cleaned.split('+'):
        try:
            number = int(part.strip())
        except ValueError:
            print(f"Warning: Invalid bell number '{part}', skipping")
            continue

        if number == 0:
            continue  # zero in a chord is a no-op
        if not (1 <= number <= 16):
            print(f"Warning: Bell number {number} out of range (1-16), skipping")
            continue

        # 1-indexed bell -> 0-indexed bit.
        flags |= 1 << (number - 1)

    return flags
|
|
||||||
|
|
||||||
|
|
||||||
def parse_melody_line(line: str) -> Tuple[str, List[int]]:
    """Parse one line of the form ``MELODY_NAME: step,step,step``.

    Unlike the .bsm converter's variant, this one skips malformed lines
    silently (no warnings). Blank lines and ``#`` comments are ignored.

    Returns:
        (melody_name, values) on success, or (None, []) for skipped lines.
    """
    stripped = line.strip()

    # Blank line, comment, or missing separator -> nothing to parse.
    if not stripped or stripped.startswith('#') or ':' not in stripped:
        return None, []

    name, _, steps_part = stripped.partition(':')
    name = name.strip()
    if not name:
        return None, []

    # One uint16_t bell bitmask per comma-separated step.
    return name, [parse_bell_notation(tok) for tok in steps_part.strip().split(',')]
|
|
||||||
|
|
||||||
|
|
||||||
def format_melody_array(melody_name: str, values: List[int], values_per_line: int = 8) -> str:
    """Render melody step values as a C ``const uint16_t PROGMEM`` array.

    Args:
        melody_name: Technical melody name; lower-cased and prefixed with
            ``melody_builtin_`` for the C identifier.
        values: uint16_t step bitmasks.
        values_per_line: How many hex literals to place on each row.

    Returns:
        The complete C array declaration as a string.
    """
    array_name = f"melody_builtin_{melody_name.lower()}"
    total = len(values)

    rows = [f"const uint16_t PROGMEM {array_name}[] = {{"]
    for start in range(0, total, values_per_line):
        chunk = values[start:start + values_per_line]
        rendered = ", ".join(f"0x{v:04X}" for v in chunk)
        # Every row but the last continues with a trailing comma.
        suffix = "," if start + len(chunk) < total else ""
        rows.append(" " + rendered + suffix)
    rows.append("};")

    return "\n".join(rows)
|
|
||||||
|
|
||||||
|
|
||||||
def format_melody_info_entry(melody_name: str, display_name: str, array_size: int) -> str:
    """Render one ``MelodyInfo`` struct initializer entry.

    Args:
        melody_name: Technical name; lower-cased into the ``builtin_`` id
            and the ``melody_builtin_`` array identifier.
        display_name: Human-readable melody name.
        array_size: Element count (kept for interface compatibility; the
            emitted C uses ``sizeof`` instead of this value).

    Returns:
        The formatted struct entry (no trailing comma).
    """
    lowered = melody_name.lower()
    array_name = f"melody_builtin_{lowered}"
    id_name = f"builtin_{lowered}"

    # NOTE(review): original indentation inside this literal was lost in
    # extraction; reconstructed with conventional 4-space nesting.
    return f"""    {{
        "{display_name}",
        "{id_name}",
        {array_name},
        sizeof({array_name}) / sizeof(uint16_t)
    }}"""
|
|
||||||
|
|
||||||
|
|
||||||
def convert_to_header(input_path: str, output_path: str = "melodies.h"):
    """Parse a melody text file and emit a C header with PROGMEM arrays.

    The header contains one ``const uint16_t PROGMEM`` array per melody,
    a ``MelodyInfo`` struct definition, a ``MELODY_LIBRARY`` table, and a
    ``MELODY_LIBRARY_SIZE`` constant.

    Args:
        input_path: Path to the input text file (one melody per line).
        output_path: Path of the generated .h file.

    Returns:
        True on success, False when the input is missing, empty, or an
        error occurs.
    """
    melodies = []  # (technical_name, display_name, values) triples

    try:
        with open(input_path, 'r', encoding='utf-8') as src:
            raw_lines = src.readlines()

        print(f"Reading from: {input_path}")
        print(f"Output file: {output_path}\n")

        for raw in raw_lines:
            name, values = parse_melody_line(raw)
            if not (name and values):
                continue
            # Display name: underscores to spaces, title case.
            label = name.replace('_', ' ').title()
            melodies.append((name, label, values))
            print(f"✓ Parsed: {label} ({len(values)} steps)")

        if not melodies:
            print("Error: No valid melodies found in input file")
            return False

        print(f"\n{'='*50}")
        print(f"Generating C header file...\n")

        divider = "// ========================================\n"

        with open(output_path, 'w', encoding='utf-8') as out:
            # File banner + header guard.
            out.write("/*\n")
            out.write(" * Bell Melodies - Auto-generated\n")
            out.write(f" * Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
            out.write(f" * Source: {input_path}\n")
            out.write(" * \n")
            out.write(" * This file contains built-in melody definitions for the ESP32 bell controller\n")
            out.write(" */\n\n")
            out.write("#ifndef MELODIES_H\n")
            out.write("#define MELODIES_H\n\n")
            out.write("#include <Arduino.h>\n\n")

            # Per-melody data arrays.
            out.write(divider + "// Melody Data Arrays\n" + divider + "\n")
            for name, label, values in melodies:
                out.write(f"// {label}\n")
                out.write(format_melody_array(name, values))
                out.write("\n\n")

            # Struct definition for library entries.
            out.write(divider + "// Melody Information Structure\n" + divider + "\n")
            out.write("struct MelodyInfo {\n")
            out.write(" const char* display_name;\n")
            out.write(" const char* id;\n")
            out.write(" const uint16_t* data;\n")
            out.write(" size_t length;\n")
            out.write("};\n\n")

            # Library table: comma after every entry except the last.
            out.write(divider + "// Melody Library\n" + divider + "\n")
            out.write("const MelodyInfo MELODY_LIBRARY[] = {\n")
            entries = [
                format_melody_info_entry(name, label, len(values))
                for name, label, values in melodies
            ]
            out.write(",\n".join(entries) + "\n")
            out.write("};\n\n")

            out.write(f"const size_t MELODY_LIBRARY_SIZE = {len(melodies)};\n\n")
            out.write("#endif // MELODIES_H\n")

        # Summary.
        print(f"✓ Successfully created {output_path}")
        print(f" Melodies: {len(melodies)}")
        total_steps = sum(len(values) for _, _, values in melodies)
        print(f" Total steps: {total_steps}")
        print(f" Estimated PROGMEM usage: {total_steps * 2} bytes")
        print(f"\n{'='*50}")
        print("Done! Include this file in your ESP32 project.")

        return True

    except FileNotFoundError:
        print(f"Error: Input file '{input_path}' not found")
        return False
    except Exception as e:
        print(f"Error: {e}")
        import traceback
        traceback.print_exc()
        return False
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""Main entry point"""
|
|
||||||
print("=== Bell Melody to C Header Converter ===")
|
|
||||||
print("Creates melodies.h for ESP32 firmware\n")
|
|
||||||
|
|
||||||
# Default input file
|
|
||||||
input_file = "builtin_melodies.txt"
|
|
||||||
output_file = "melodies.h"
|
|
||||||
|
|
||||||
# Check if file exists
|
|
||||||
if not Path(input_file).exists():
|
|
||||||
print(f"Error: '{input_file}' not found in current directory!")
|
|
||||||
print("\nPlease create 'builtin_melodies.txt' with format:")
|
|
||||||
print(" MELODY_NAME: step,step,step,...")
|
|
||||||
print("\nStep notation:")
|
|
||||||
print(" 0 - Silence")
|
|
||||||
print(" 4 - Bell #4 only")
|
|
||||||
print(" 2+8 - Bells #2 and #8 together")
|
|
||||||
print(" 1+2+3 - Bells #1, #2, and #3 together")
|
|
||||||
print("\nExample:")
|
|
||||||
print(" JINGLE_BELLS: 4,4,4,0,4,4,4,0,4,8,1,2,4")
|
|
||||||
print(" ALARM: 2+8,0,2+8,0,2+8,0")
|
|
||||||
print(" HAPPY_BIRTHDAY: 1,1,2,1,4,3,0,1,1,2,1,8,4")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
success = convert_to_header(input_file, output_file)
|
|
||||||
sys.exit(0 if success else 1)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
@@ -1,475 +0,0 @@
|
|||||||
/*
|
|
||||||
* Bell Melodies - Auto-generated
|
|
||||||
* Generated: 2026-01-20 09:19:43
|
|
||||||
* Source: builtin_melodies.txt
|
|
||||||
*
|
|
||||||
* This file contains built-in melody definitions for the ESP32 bell controller
|
|
||||||
*/
|
|
||||||
|
|
||||||
#ifndef MELODIES_H
|
|
||||||
#define MELODIES_H
|
|
||||||
|
|
||||||
#include <Arduino.h>
|
|
||||||
|
|
||||||
// ========================================
|
|
||||||
// Melody Data Arrays
|
|
||||||
// ========================================
|
|
||||||
|
|
||||||
// Doxology Traditional
|
|
||||||
const uint16_t PROGMEM melody_builtin_doxology_traditional[] = {
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0004, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0008, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0008, 0x0000, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Doxology Alternative
|
|
||||||
const uint16_t PROGMEM melody_builtin_doxology_alternative[] = {
|
|
||||||
0x0001, 0x0000, 0x0002, 0x0004, 0x0000, 0x0018, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0002, 0x0004, 0x0000, 0x0018, 0x0000, 0x0001, 0x0000,
|
|
||||||
0x0002, 0x0004, 0x0000, 0x0018, 0x0000, 0x0001, 0x0002, 0x0001,
|
|
||||||
0x0002, 0x0004, 0x0000, 0x0018, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Doxology Festive
|
|
||||||
const uint16_t PROGMEM melody_builtin_doxology_festive[] = {
|
|
||||||
0x0002, 0x0004, 0x0009, 0x0004, 0x0002, 0x0004, 0x0011, 0x0004,
|
|
||||||
0x0002, 0x0004, 0x0021, 0x0004, 0x0002, 0x0004, 0x0011, 0x0004
|
|
||||||
};
|
|
||||||
|
|
||||||
// Vesper Traditional
|
|
||||||
const uint16_t PROGMEM melody_builtin_vesper_traditional[] = {
|
|
||||||
0x0001, 0x0002, 0x0004, 0x0000, 0x0001, 0x0002, 0x0004, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0004, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Vesper Alternative
|
|
||||||
const uint16_t PROGMEM melody_builtin_vesper_alternative[] = {
|
|
||||||
0x0001, 0x0002, 0x0000, 0x0000, 0x0001, 0x0002, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0004, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0000, 0x0000, 0x0001, 0x0002, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0008, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0000, 0x0000, 0x0001, 0x0002, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0000, 0x0000, 0x0001, 0x0002, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0000, 0x0000, 0x0001, 0x0004, 0x0000, 0x0000,
|
|
||||||
0x0001, 0x0008, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Catehetical
|
|
||||||
const uint16_t PROGMEM melody_builtin_catehetical[] = {
|
|
||||||
0x0001, 0x0002, 0x0004, 0x0008, 0x0010
|
|
||||||
};
|
|
||||||
|
|
||||||
// Orthros Traditional
|
|
||||||
const uint16_t PROGMEM melody_builtin_orthros_traditional[] = {
|
|
||||||
0x0001, 0x0000, 0x0002, 0x0000, 0x0004, 0x0008, 0x0000, 0x0010,
|
|
||||||
0x0000, 0x0020, 0x0000, 0x0040, 0x0080, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Orthros Alternative
|
|
||||||
const uint16_t PROGMEM melody_builtin_orthros_alternative[] = {
|
|
||||||
0x0001, 0x0000, 0x0002, 0x0001, 0x0000, 0x0002, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0004, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Mournfull Toll
|
|
||||||
const uint16_t PROGMEM melody_builtin_mournfull_toll[] = {
|
|
||||||
0x0001, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0004, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0004, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0002, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0002, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0008, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0008, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Mournfull Toll Alternative
|
|
||||||
const uint16_t PROGMEM melody_builtin_mournfull_toll_alternative[] = {
|
|
||||||
0x0001, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0001,
|
|
||||||
0x0001, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0004,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0004, 0x0004,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0002, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0002, 0x0002, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0008, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0008, 0x0008, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Mournfull Toll Meg Par
|
|
||||||
const uint16_t PROGMEM melody_builtin_mournfull_toll_meg_par[] = {
|
|
||||||
0x0001, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0001, 0x0001,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0004, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0004, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0004, 0x0004, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0000, 0x0002, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0000, 0x0002, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0002, 0x0002, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0000, 0x0008, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0000, 0x0008, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
|
|
||||||
0x0008, 0x0008, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Sematron
|
|
||||||
const uint16_t PROGMEM melody_builtin_sematron[] = {
|
|
||||||
0x0001, 0x0001, 0x0001, 0x0002, 0x0001, 0x0001, 0x0001, 0x0008,
|
|
||||||
0x0001, 0x0001, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0008
|
|
||||||
};
|
|
||||||
|
|
||||||
// Sematron Alternative
|
|
||||||
const uint16_t PROGMEM melody_builtin_sematron_alternative[] = {
|
|
||||||
0x0001, 0x0001, 0x0001, 0x0002, 0x0001, 0x0001, 0x0001, 0x0008,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0001, 0x0001, 0x0008
|
|
||||||
};
|
|
||||||
|
|
||||||
// Athonite 1 2 Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_athonite_1_2_voices[] = {
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0001, 0x0002, 0x0001, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0001, 0x0002, 0x0001, 0x0002
|
|
||||||
};
|
|
||||||
|
|
||||||
// Athonite 3 Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_athonite_3_voices[] = {
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0000, 0x0002, 0x0001, 0x0000, 0x0000,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0004
|
|
||||||
};
|
|
||||||
|
|
||||||
// Athonite 3 4 Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_athonite_3_4_voices[] = {
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0000, 0x0002, 0x0001, 0x0000, 0x0000,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0005, 0x0002, 0x0001, 0x0000, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0005, 0x0002, 0x0001, 0x0002, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0008, 0x0005, 0x0002, 0x0001, 0x0000, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0005, 0x0002, 0x0001, 0x0002, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0009, 0x0002, 0x0001, 0x0005, 0x0002, 0x0001,
|
|
||||||
0x000A, 0x0002, 0x0001, 0x0006, 0x0002, 0x0001, 0x0009, 0x0002,
|
|
||||||
0x0001, 0x0005, 0x0002, 0x0001, 0x000A, 0x0002, 0x0001, 0x0006,
|
|
||||||
0x0002, 0x0001, 0x0009
|
|
||||||
};
|
|
||||||
|
|
||||||
// Athonite 4 8 Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_athonite_4_8_voices[] = {
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0000, 0x0002, 0x0001, 0x0000, 0x0000,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0005, 0x0002, 0x0001, 0x0000, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0005, 0x0002, 0x0001, 0x0002, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0008, 0x0005, 0x0002, 0x0001, 0x0000, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0005, 0x0002, 0x0001, 0x0002, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0009, 0x0002, 0x0001, 0x0011, 0x0002, 0x0001,
|
|
||||||
0x0022, 0x0002, 0x0001, 0x0081, 0x0002, 0x0001, 0x000A, 0x0002,
|
|
||||||
0x0001, 0x0041, 0x0002, 0x0001, 0x0012, 0x0002, 0x0001, 0x0021,
|
|
||||||
0x0002, 0x0001, 0x0082, 0x0002, 0x0001, 0x0009, 0x0002, 0x0001,
|
|
||||||
0x0042, 0x0002, 0x0001, 0x0011, 0x0002, 0x0001, 0x0022, 0x0002,
|
|
||||||
0x0001, 0x0081, 0x0002, 0x0001, 0x000A, 0x0002, 0x0001, 0x0041,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0005, 0x0002, 0x0001, 0x0000, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0005, 0x0002, 0x0001, 0x0000, 0x0000,
|
|
||||||
0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Onebyone 2 3 Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_onebyone_2_3_voices[] = {
|
|
||||||
0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000,
|
|
||||||
0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002
|
|
||||||
};
|
|
||||||
|
|
||||||
// Onebyone 4 8 Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_onebyone_4_8_voices[] = {
|
|
||||||
0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000,
|
|
||||||
0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001, 0x0004,
|
|
||||||
0x0002, 0x0004, 0x0008, 0x0004, 0x0002, 0x0011, 0x0002, 0x0004,
|
|
||||||
0x0008, 0x0004, 0x0002, 0x0021, 0x0002, 0x0004, 0x0008, 0x0004,
|
|
||||||
0x0002, 0x0041, 0x0002, 0x0004, 0x0008, 0x0004, 0x0002, 0x0081,
|
|
||||||
0x0002, 0x0004, 0x0008, 0x0004, 0x0002, 0x0041, 0x0002, 0x0004,
|
|
||||||
0x0008, 0x0004, 0x0002, 0x0021, 0x0002, 0x0004, 0x0008, 0x0004,
|
|
||||||
0x0002, 0x0041, 0x0002, 0x0004, 0x0008, 0x0004, 0x0002, 0x0081,
|
|
||||||
0x0002, 0x0004, 0x0008, 0x0004, 0x0002, 0x0041, 0x0002, 0x0004,
|
|
||||||
0x0008, 0x0004, 0x0002, 0x0021, 0x0002, 0x0004, 0x0008, 0x0004,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001,
|
|
||||||
0x0004, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001,
|
|
||||||
0x0004, 0x0001, 0x0002, 0x0001, 0x0004, 0x0001, 0x0002, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Festive 1Voice
|
|
||||||
const uint16_t PROGMEM melody_builtin_festive_1voice[] = {
|
|
||||||
0x0001, 0x0001, 0x0001, 0x0000, 0x0001, 0x0001, 0x0001, 0x0001,
|
|
||||||
0x0000, 0x0001, 0x0000, 0x0001, 0x0001, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0000, 0x0001, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// Festive 4Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_festive_4voices[] = {
|
|
||||||
0x0001, 0x0002, 0x0004, 0x0009, 0x0002, 0x0001, 0x0004, 0x0009
|
|
||||||
};
|
|
||||||
|
|
||||||
// Festive 5Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_festive_5voices[] = {
|
|
||||||
0x0001, 0x0002, 0x0004, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0008, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0004, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0010, 0x0002, 0x0001, 0x0002
|
|
||||||
};
|
|
||||||
|
|
||||||
// Festive 5Voice Alternative
|
|
||||||
const uint16_t PROGMEM melody_builtin_festive_5voice_alternative[] = {
|
|
||||||
0x0004, 0x0002, 0x0008, 0x0001, 0x0004, 0x0004, 0x0002, 0x0008,
|
|
||||||
0x0001, 0x0010, 0x0004, 0x0002, 0x0008, 0x0001, 0x0004, 0x0004,
|
|
||||||
0x0002, 0x0008, 0x0001, 0x0011, 0x0004, 0x0002, 0x0008, 0x0001,
|
|
||||||
0x0004, 0x0004, 0x0002, 0x0008, 0x0001, 0x0011, 0x0004, 0x0002,
|
|
||||||
0x0008, 0x0001, 0x0005, 0x0004, 0x0002, 0x0008, 0x0001, 0x0011,
|
|
||||||
0x0004, 0x0002, 0x0008, 0x0001, 0x0005, 0x0004, 0x0002, 0x0008,
|
|
||||||
0x0001, 0x0011, 0x0004, 0x0002, 0x0008, 0x0001, 0x0004, 0x0004,
|
|
||||||
0x0002, 0x0008, 0x0001, 0x0010, 0x0004, 0x0002, 0x0008, 0x0001,
|
|
||||||
0x0004, 0x0004, 0x0002, 0x0008, 0x0001, 0x0010
|
|
||||||
};
|
|
||||||
|
|
||||||
// Festive 6Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_festive_6voices[] = {
|
|
||||||
0x0001, 0x0002, 0x0004, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0008, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0004, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0001, 0x0002, 0x0010, 0x0002, 0x0001, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0009, 0x0002, 0x0001, 0x0002, 0x0011, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0005, 0x0002, 0x0001, 0x0002, 0x0021, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0009, 0x0002, 0x0001, 0x0002, 0x0011, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0005, 0x0002, 0x0001, 0x0002, 0x0021, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0009, 0x0002, 0x0001, 0x0002, 0x0011, 0x0002,
|
|
||||||
0x0001, 0x0002, 0x0005, 0x0002, 0x0001, 0x0002, 0x0021, 0x0002,
|
|
||||||
0x0001, 0x0002
|
|
||||||
};
|
|
||||||
|
|
||||||
// Festive 8Voices
|
|
||||||
const uint16_t PROGMEM melody_builtin_festive_8voices[] = {
|
|
||||||
0x0001, 0x0002, 0x0004, 0x0008, 0x0010, 0x0020, 0x0040, 0x0080
|
|
||||||
};
|
|
||||||
|
|
||||||
// Ormilia
|
|
||||||
const uint16_t PROGMEM melody_builtin_ormilia[] = {
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0002, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001, 0x0002, 0x0001,
|
|
||||||
0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0009, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0005, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0009, 0x0000, 0x0001, 0x0002, 0x0005, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0009, 0x0000, 0x0001, 0x0002, 0x0005, 0x0000, 0x0001,
|
|
||||||
0x0002, 0x0011, 0x0002, 0x0001, 0x0002, 0x0021, 0x0002, 0x0001,
|
|
||||||
0x0002, 0x0011, 0x0002, 0x0001, 0x0002, 0x0021, 0x0002, 0x0041,
|
|
||||||
0x0002, 0x0081, 0x0002, 0x0009, 0x0002, 0x0041, 0x0002, 0x0081,
|
|
||||||
0x0002, 0x0009, 0x0002, 0x0041, 0x0002, 0x0081, 0x0002, 0x0005,
|
|
||||||
0x0002, 0x0001, 0x0000
|
|
||||||
};
|
|
||||||
|
|
||||||
// ========================================
|
|
||||||
// Melody Information Structure
|
|
||||||
// ========================================
|
|
||||||
|
|
||||||
struct MelodyInfo {
|
|
||||||
const char* display_name;
|
|
||||||
const char* id;
|
|
||||||
const uint16_t* data;
|
|
||||||
size_t length;
|
|
||||||
};
|
|
||||||
|
|
||||||
// ========================================
|
|
||||||
// Melody Library
|
|
||||||
// ========================================
|
|
||||||
|
|
||||||
const MelodyInfo MELODY_LIBRARY[] = {
|
|
||||||
{
|
|
||||||
"Doxology Traditional",
|
|
||||||
"builtin_doxology_traditional",
|
|
||||||
melody_builtin_doxology_traditional,
|
|
||||||
sizeof(melody_builtin_doxology_traditional) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Doxology Alternative",
|
|
||||||
"builtin_doxology_alternative",
|
|
||||||
melody_builtin_doxology_alternative,
|
|
||||||
sizeof(melody_builtin_doxology_alternative) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Doxology Festive",
|
|
||||||
"builtin_doxology_festive",
|
|
||||||
melody_builtin_doxology_festive,
|
|
||||||
sizeof(melody_builtin_doxology_festive) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Vesper Traditional",
|
|
||||||
"builtin_vesper_traditional",
|
|
||||||
melody_builtin_vesper_traditional,
|
|
||||||
sizeof(melody_builtin_vesper_traditional) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Vesper Alternative",
|
|
||||||
"builtin_vesper_alternative",
|
|
||||||
melody_builtin_vesper_alternative,
|
|
||||||
sizeof(melody_builtin_vesper_alternative) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Catehetical",
|
|
||||||
"builtin_catehetical",
|
|
||||||
melody_builtin_catehetical,
|
|
||||||
sizeof(melody_builtin_catehetical) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Orthros Traditional",
|
|
||||||
"builtin_orthros_traditional",
|
|
||||||
melody_builtin_orthros_traditional,
|
|
||||||
sizeof(melody_builtin_orthros_traditional) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Orthros Alternative",
|
|
||||||
"builtin_orthros_alternative",
|
|
||||||
melody_builtin_orthros_alternative,
|
|
||||||
sizeof(melody_builtin_orthros_alternative) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Mournfull Toll",
|
|
||||||
"builtin_mournfull_toll",
|
|
||||||
melody_builtin_mournfull_toll,
|
|
||||||
sizeof(melody_builtin_mournfull_toll) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Mournfull Toll Alternative",
|
|
||||||
"builtin_mournfull_toll_alternative",
|
|
||||||
melody_builtin_mournfull_toll_alternative,
|
|
||||||
sizeof(melody_builtin_mournfull_toll_alternative) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Mournfull Toll Meg Par",
|
|
||||||
"builtin_mournfull_toll_meg_par",
|
|
||||||
melody_builtin_mournfull_toll_meg_par,
|
|
||||||
sizeof(melody_builtin_mournfull_toll_meg_par) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Sematron",
|
|
||||||
"builtin_sematron",
|
|
||||||
melody_builtin_sematron,
|
|
||||||
sizeof(melody_builtin_sematron) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Sematron Alternative",
|
|
||||||
"builtin_sematron_alternative",
|
|
||||||
melody_builtin_sematron_alternative,
|
|
||||||
sizeof(melody_builtin_sematron_alternative) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Athonite 1 2 Voices",
|
|
||||||
"builtin_athonite_1_2_voices",
|
|
||||||
melody_builtin_athonite_1_2_voices,
|
|
||||||
sizeof(melody_builtin_athonite_1_2_voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Athonite 3 Voices",
|
|
||||||
"builtin_athonite_3_voices",
|
|
||||||
melody_builtin_athonite_3_voices,
|
|
||||||
sizeof(melody_builtin_athonite_3_voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Athonite 3 4 Voices",
|
|
||||||
"builtin_athonite_3_4_voices",
|
|
||||||
melody_builtin_athonite_3_4_voices,
|
|
||||||
sizeof(melody_builtin_athonite_3_4_voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Athonite 4 8 Voices",
|
|
||||||
"builtin_athonite_4_8_voices",
|
|
||||||
melody_builtin_athonite_4_8_voices,
|
|
||||||
sizeof(melody_builtin_athonite_4_8_voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Onebyone 2 3 Voices",
|
|
||||||
"builtin_onebyone_2_3_voices",
|
|
||||||
melody_builtin_onebyone_2_3_voices,
|
|
||||||
sizeof(melody_builtin_onebyone_2_3_voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Onebyone 4 8 Voices",
|
|
||||||
"builtin_onebyone_4_8_voices",
|
|
||||||
melody_builtin_onebyone_4_8_voices,
|
|
||||||
sizeof(melody_builtin_onebyone_4_8_voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Festive 1Voice",
|
|
||||||
"builtin_festive_1voice",
|
|
||||||
melody_builtin_festive_1voice,
|
|
||||||
sizeof(melody_builtin_festive_1voice) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Festive 4Voices",
|
|
||||||
"builtin_festive_4voices",
|
|
||||||
melody_builtin_festive_4voices,
|
|
||||||
sizeof(melody_builtin_festive_4voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Festive 5Voices",
|
|
||||||
"builtin_festive_5voices",
|
|
||||||
melody_builtin_festive_5voices,
|
|
||||||
sizeof(melody_builtin_festive_5voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Festive 5Voice Alternative",
|
|
||||||
"builtin_festive_5voice_alternative",
|
|
||||||
melody_builtin_festive_5voice_alternative,
|
|
||||||
sizeof(melody_builtin_festive_5voice_alternative) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Festive 6Voices",
|
|
||||||
"builtin_festive_6voices",
|
|
||||||
melody_builtin_festive_6voices,
|
|
||||||
sizeof(melody_builtin_festive_6voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Festive 8Voices",
|
|
||||||
"builtin_festive_8voices",
|
|
||||||
melody_builtin_festive_8voices,
|
|
||||||
sizeof(melody_builtin_festive_8voices) / sizeof(uint16_t)
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Ormilia",
|
|
||||||
"builtin_ormilia",
|
|
||||||
melody_builtin_ormilia,
|
|
||||||
sizeof(melody_builtin_ormilia) / sizeof(uint16_t)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const size_t MELODY_LIBRARY_SIZE = 26;
|
|
||||||
|
|
||||||
#endif // MELODIES_H
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
Esperinos-Adamn-1k [min 1426 / mid 572 / max 194]: 1,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0
|
|
||||||
Esperinos-Eortastikos-1k: 1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0
|
|
||||||
Orthros-1k [min 552 / mid 1402 / max 2229]: 1,0,1,0,1,1,0
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
PyQt6>=6.4.0
|
|
||||||
numpy>=1.21.0
|
|
||||||
sounddevice>=0.4.6
|
|
||||||
BIN
VesperPlus.png
|
Before Width: | Height: | Size: 42 KiB |
4
backend/.dockerignore
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
firebase-service-account.json
|
||||||
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
|
.venv/
|
||||||
@@ -1,5 +1,17 @@
|
|||||||
FROM python:3.11-slim
|
FROM python:3.11-slim
|
||||||
|
|
||||||
|
# System dependencies: WeasyPrint (pango/cairo), ffmpeg (video thumbs), poppler (pdf2image)
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
libpango-1.0-0 \
|
||||||
|
libpangocairo-1.0-0 \
|
||||||
|
libgdk-pixbuf-2.0-0 \
|
||||||
|
libffi-dev \
|
||||||
|
shared-mime-info \
|
||||||
|
fonts-dejavu-core \
|
||||||
|
ffmpeg \
|
||||||
|
poppler-utils \
|
||||||
|
&& apt-get clean && rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
|
|||||||
53
backend/admin/router.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import asyncio
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException, Request
|
||||||
|
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
logger = logging.getLogger("admin.deploy")
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/admin", tags=["admin"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/deploy")
|
||||||
|
async def deploy(request: Request):
|
||||||
|
"""Gitea webhook endpoint — pulls latest code and rebuilds Docker containers.
|
||||||
|
|
||||||
|
Gitea webhook configuration:
|
||||||
|
URL: https://<your-domain>/api/admin/deploy
|
||||||
|
Secret token: value of DEPLOY_SECRET env var
|
||||||
|
Content-Type: application/json
|
||||||
|
Trigger: Push events only (branch: main)
|
||||||
|
|
||||||
|
Add to VPS .env:
|
||||||
|
DEPLOY_SECRET=<random-strong-token>
|
||||||
|
DEPLOY_PROJECT_PATH=/home/bellsystems/bellsystems-cp
|
||||||
|
"""
|
||||||
|
if not settings.deploy_secret:
|
||||||
|
raise HTTPException(status_code=503, detail="Deploy secret not configured on server")
|
||||||
|
|
||||||
|
# Gitea sends the HMAC-SHA256 of the request body in X-Gitea-Signature
|
||||||
|
sig_header = request.headers.get("X-Gitea-Signature", "")
|
||||||
|
body = await request.body()
|
||||||
|
expected_sig = hmac.new(
|
||||||
|
key=settings.deploy_secret.encode(),
|
||||||
|
msg=body,
|
||||||
|
digestmod=hashlib.sha256,
|
||||||
|
).hexdigest()
|
||||||
|
if not hmac.compare_digest(sig_header, expected_sig):
|
||||||
|
raise HTTPException(status_code=403, detail="Invalid webhook signature")
|
||||||
|
|
||||||
|
logger.info("Auto-deploy triggered via Gitea webhook")
|
||||||
|
|
||||||
|
# Write a trigger file to the host-mounted project path.
|
||||||
|
# A host-side watcher service (bellsystems-deploy-watcher) polls for this
|
||||||
|
# file and runs deploy-host.sh as the bellsystems user when it appears.
|
||||||
|
trigger_path = f"{settings.deploy_project_path}/.deploy-trigger"
|
||||||
|
with open(trigger_path, "w") as f:
|
||||||
|
f.write("deploy\n")
|
||||||
|
|
||||||
|
logger.info("Auto-deploy trigger file written")
|
||||||
|
return {"ok": True, "message": "Deploy started"}
|
||||||
@@ -10,42 +10,141 @@ class Role(str, Enum):
|
|||||||
user = "user"
|
user = "user"
|
||||||
|
|
||||||
|
|
||||||
class SectionPermissions(BaseModel):
|
class MelodiesPermissions(BaseModel):
|
||||||
|
view: bool = False
|
||||||
|
add: bool = False
|
||||||
|
delete: bool = False
|
||||||
|
safe_edit: bool = False
|
||||||
|
full_edit: bool = False
|
||||||
|
archetype_access: bool = False
|
||||||
|
settings_access: bool = False
|
||||||
|
compose_access: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class DevicesPermissions(BaseModel):
|
||||||
|
view: bool = False
|
||||||
|
add: bool = False
|
||||||
|
delete: bool = False
|
||||||
|
safe_edit: bool = False
|
||||||
|
edit_bells: bool = False
|
||||||
|
edit_clock: bool = False
|
||||||
|
edit_warranty: bool = False
|
||||||
|
full_edit: bool = False
|
||||||
|
control: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class AppUsersPermissions(BaseModel):
|
||||||
|
view: bool = False
|
||||||
|
add: bool = False
|
||||||
|
delete: bool = False
|
||||||
|
safe_edit: bool = False
|
||||||
|
full_edit: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class IssuesNotesPermissions(BaseModel):
|
||||||
|
view: bool = False
|
||||||
|
add: bool = False
|
||||||
|
delete: bool = False
|
||||||
|
edit: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class MailPermissions(BaseModel):
|
||||||
|
view: bool = False
|
||||||
|
compose: bool = False
|
||||||
|
reply: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class CrmPermissions(BaseModel):
|
||||||
|
activity_log: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class CrmCustomersPermissions(BaseModel):
|
||||||
|
full_access: bool = False
|
||||||
|
overview: bool = False
|
||||||
|
orders_view: bool = False
|
||||||
|
orders_edit: bool = False
|
||||||
|
quotations_view: bool = False
|
||||||
|
quotations_edit: bool = False
|
||||||
|
comms_view: bool = False
|
||||||
|
comms_log: bool = False
|
||||||
|
comms_edit: bool = False
|
||||||
|
comms_compose: bool = False
|
||||||
|
add: bool = False
|
||||||
|
delete: bool = False
|
||||||
|
files_view: bool = False
|
||||||
|
files_edit: bool = False
|
||||||
|
devices_view: bool = False
|
||||||
|
devices_edit: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class CrmProductsPermissions(BaseModel):
|
||||||
view: bool = False
|
view: bool = False
|
||||||
add: bool = False
|
add: bool = False
|
||||||
edit: bool = False
|
edit: bool = False
|
||||||
delete: bool = False
|
|
||||||
|
|
||||||
|
class MfgPermissions(BaseModel):
|
||||||
|
view_inventory: bool = False
|
||||||
|
edit: bool = False
|
||||||
|
provision: bool = False
|
||||||
|
firmware_view: bool = False
|
||||||
|
firmware_edit: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class ApiReferencePermissions(BaseModel):
|
||||||
|
access: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class MqttPermissions(BaseModel):
|
||||||
|
access: bool = False
|
||||||
|
|
||||||
|
|
||||||
class StaffPermissions(BaseModel):
|
class StaffPermissions(BaseModel):
|
||||||
melodies: SectionPermissions = SectionPermissions()
|
melodies: MelodiesPermissions = MelodiesPermissions()
|
||||||
devices: SectionPermissions = SectionPermissions()
|
devices: DevicesPermissions = DevicesPermissions()
|
||||||
app_users: SectionPermissions = SectionPermissions()
|
app_users: AppUsersPermissions = AppUsersPermissions()
|
||||||
equipment: SectionPermissions = SectionPermissions()
|
issues_notes: IssuesNotesPermissions = IssuesNotesPermissions()
|
||||||
mqtt: bool = False
|
mail: MailPermissions = MailPermissions()
|
||||||
|
crm: CrmPermissions = CrmPermissions()
|
||||||
|
crm_customers: CrmCustomersPermissions = CrmCustomersPermissions()
|
||||||
|
crm_products: CrmProductsPermissions = CrmProductsPermissions()
|
||||||
|
mfg: MfgPermissions = MfgPermissions()
|
||||||
|
api_reference: ApiReferencePermissions = ApiReferencePermissions()
|
||||||
|
mqtt: MqttPermissions = MqttPermissions()
|
||||||
|
|
||||||
|
|
||||||
# Default permissions per role
|
|
||||||
def default_permissions_for_role(role: str) -> Optional[dict]:
|
def default_permissions_for_role(role: str) -> Optional[dict]:
|
||||||
if role in ("sysadmin", "admin"):
|
if role in ("sysadmin", "admin"):
|
||||||
return None # Full access, permissions field not used
|
return None # Full access, permissions field not used
|
||||||
full = {"view": True, "add": True, "edit": True, "delete": True}
|
|
||||||
view_only = {"view": True, "add": False, "edit": False, "delete": False}
|
|
||||||
if role == "editor":
|
if role == "editor":
|
||||||
return {
|
return {
|
||||||
"melodies": full,
|
"melodies": {"view": True, "add": True, "delete": True, "safe_edit": True, "full_edit": True, "archetype_access": True, "settings_access": True, "compose_access": True},
|
||||||
"devices": full,
|
"devices": {"view": True, "add": True, "delete": True, "safe_edit": True, "edit_bells": True, "edit_clock": True, "edit_warranty": True, "full_edit": True, "control": True},
|
||||||
"app_users": full,
|
"app_users": {"view": True, "add": True, "delete": True, "safe_edit": True, "full_edit": True},
|
||||||
"equipment": full,
|
"issues_notes": {"view": True, "add": True, "delete": True, "edit": True},
|
||||||
"mqtt": True,
|
"mail": {"view": True, "compose": True, "reply": True},
|
||||||
|
"crm": {"activity_log": True},
|
||||||
|
"crm_customers": {"full_access": True, "overview": True, "orders_view": True, "orders_edit": True, "quotations_view": True, "quotations_edit": True, "comms_view": True, "comms_log": True, "comms_edit": True, "comms_compose": True, "add": True, "delete": True, "files_view": True, "files_edit": True, "devices_view": True, "devices_edit": True},
|
||||||
|
"crm_products": {"view": True, "add": True, "edit": True},
|
||||||
|
"mfg": {"view_inventory": True, "edit": True, "provision": True, "firmware_view": True, "firmware_edit": True},
|
||||||
|
"api_reference": {"access": True},
|
||||||
|
"mqtt": {"access": True},
|
||||||
}
|
}
|
||||||
|
|
||||||
# user role - view only
|
# user role - view only
|
||||||
return {
|
return {
|
||||||
"melodies": view_only,
|
"melodies": {"view": True, "add": False, "delete": False, "safe_edit": False, "full_edit": False, "archetype_access": False, "settings_access": False, "compose_access": False},
|
||||||
"devices": view_only,
|
"devices": {"view": True, "add": False, "delete": False, "safe_edit": False, "edit_bells": False, "edit_clock": False, "edit_warranty": False, "full_edit": False, "control": False},
|
||||||
"app_users": view_only,
|
"app_users": {"view": True, "add": False, "delete": False, "safe_edit": False, "full_edit": False},
|
||||||
"equipment": view_only,
|
"issues_notes": {"view": True, "add": False, "delete": False, "edit": False},
|
||||||
"mqtt": False,
|
"mail": {"view": True, "compose": False, "reply": False},
|
||||||
|
"crm": {"activity_log": False},
|
||||||
|
"crm_customers": {"full_access": False, "overview": True, "orders_view": True, "orders_edit": False, "quotations_view": True, "quotations_edit": False, "comms_view": True, "comms_log": False, "comms_edit": False, "comms_compose": False, "add": False, "delete": False, "files_view": True, "files_edit": False, "devices_view": True, "devices_edit": False},
|
||||||
|
"crm_products": {"view": True, "add": False, "edit": False},
|
||||||
|
"mfg": {"view_inventory": True, "edit": False, "provision": False, "firmware_view": True, "firmware_edit": False},
|
||||||
|
"api_reference": {"access": False},
|
||||||
|
"mqtt": {"access": False},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,27 +1,38 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from mqtt.database import get_db
|
from database import get_db
|
||||||
|
|
||||||
logger = logging.getLogger("builder.database")
|
logger = logging.getLogger("builder.database")
|
||||||
|
|
||||||
|
|
||||||
async def insert_built_melody(melody_id: str, name: str, pid: str, steps: str) -> None:
|
async def insert_built_melody(melody_id: str, name: str, pid: str, steps: str, is_builtin: bool = False) -> None:
|
||||||
db = await get_db()
|
db = await get_db()
|
||||||
await db.execute(
|
await db.execute(
|
||||||
"""INSERT INTO built_melodies (id, name, pid, steps, assigned_melody_ids)
|
"""INSERT INTO built_melodies (id, name, pid, steps, assigned_melody_ids, is_builtin)
|
||||||
VALUES (?, ?, ?, ?, ?)""",
|
VALUES (?, ?, ?, ?, ?, ?)""",
|
||||||
(melody_id, name, pid, steps, json.dumps([])),
|
(melody_id, name, pid, steps, json.dumps([]), 1 if is_builtin else 0),
|
||||||
)
|
)
|
||||||
await db.commit()
|
await db.commit()
|
||||||
|
|
||||||
|
|
||||||
async def update_built_melody(melody_id: str, name: str, pid: str, steps: str) -> None:
|
async def update_built_melody(melody_id: str, name: str, pid: str, steps: str, is_builtin: bool = False) -> None:
|
||||||
db = await get_db()
|
db = await get_db()
|
||||||
await db.execute(
|
await db.execute(
|
||||||
"""UPDATE built_melodies
|
"""UPDATE built_melodies
|
||||||
SET name = ?, pid = ?, steps = ?, updated_at = datetime('now')
|
SET name = ?, pid = ?, steps = ?, is_builtin = ?, updated_at = datetime('now')
|
||||||
WHERE id = ?""",
|
WHERE id = ?""",
|
||||||
(name, pid, steps, melody_id),
|
(name, pid, steps, 1 if is_builtin else 0, melody_id),
|
||||||
|
)
|
||||||
|
await db.commit()
|
||||||
|
|
||||||
|
|
||||||
|
async def update_builtin_flag(melody_id: str, is_builtin: bool) -> None:
|
||||||
|
db = await get_db()
|
||||||
|
await db.execute(
|
||||||
|
"""UPDATE built_melodies
|
||||||
|
SET is_builtin = ?, updated_at = datetime('now')
|
||||||
|
WHERE id = ?""",
|
||||||
|
(1 if is_builtin else 0, melody_id),
|
||||||
)
|
)
|
||||||
await db.commit()
|
await db.commit()
|
||||||
|
|
||||||
@@ -68,6 +79,7 @@ async def get_built_melody(melody_id: str) -> dict | None:
|
|||||||
return None
|
return None
|
||||||
row = dict(rows[0])
|
row = dict(rows[0])
|
||||||
row["assigned_melody_ids"] = json.loads(row["assigned_melody_ids"] or "[]")
|
row["assigned_melody_ids"] = json.loads(row["assigned_melody_ids"] or "[]")
|
||||||
|
row["is_builtin"] = bool(row.get("is_builtin", 0))
|
||||||
return row
|
return row
|
||||||
|
|
||||||
|
|
||||||
@@ -80,6 +92,7 @@ async def list_built_melodies() -> list[dict]:
|
|||||||
for row in rows:
|
for row in rows:
|
||||||
r = dict(row)
|
r = dict(row)
|
||||||
r["assigned_melody_ids"] = json.loads(r["assigned_melody_ids"] or "[]")
|
r["assigned_melody_ids"] = json.loads(r["assigned_melody_ids"] or "[]")
|
||||||
|
r["is_builtin"] = bool(r.get("is_builtin", 0))
|
||||||
results.append(r)
|
results.append(r)
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|||||||
@@ -6,12 +6,14 @@ class BuiltMelodyCreate(BaseModel):
|
|||||||
name: str
|
name: str
|
||||||
pid: str
|
pid: str
|
||||||
steps: str # raw step string e.g. "1,2,2+1,1,2,3+1"
|
steps: str # raw step string e.g. "1,2,2+1,1,2,3+1"
|
||||||
|
is_builtin: bool = False
|
||||||
|
|
||||||
|
|
||||||
class BuiltMelodyUpdate(BaseModel):
|
class BuiltMelodyUpdate(BaseModel):
|
||||||
name: Optional[str] = None
|
name: Optional[str] = None
|
||||||
pid: Optional[str] = None
|
pid: Optional[str] = None
|
||||||
steps: Optional[str] = None
|
steps: Optional[str] = None
|
||||||
|
is_builtin: Optional[bool] = None
|
||||||
|
|
||||||
|
|
||||||
class BuiltMelodyInDB(BaseModel):
|
class BuiltMelodyInDB(BaseModel):
|
||||||
@@ -19,6 +21,7 @@ class BuiltMelodyInDB(BaseModel):
|
|||||||
name: str
|
name: str
|
||||||
pid: str
|
pid: str
|
||||||
steps: str
|
steps: str
|
||||||
|
is_builtin: bool = False
|
||||||
binary_path: Optional[str] = None
|
binary_path: Optional[str] = None
|
||||||
binary_url: Optional[str] = None
|
binary_url: Optional[str] = None
|
||||||
progmem_code: Optional[str] = None
|
progmem_code: Optional[str] = None
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
from fastapi import APIRouter, Depends, HTTPException
|
from fastapi import APIRouter, Depends, HTTPException
|
||||||
from fastapi.responses import FileResponse
|
from fastapi.responses import FileResponse, PlainTextResponse
|
||||||
from auth.models import TokenPayload
|
from auth.models import TokenPayload
|
||||||
from auth.dependencies import require_permission
|
from auth.dependencies import require_permission
|
||||||
from builder.models import (
|
from builder.models import (
|
||||||
@@ -20,6 +20,7 @@ async def list_built_melodies(
|
|||||||
melodies = await service.list_built_melodies()
|
melodies = await service.list_built_melodies()
|
||||||
return BuiltMelodyListResponse(melodies=melodies, total=len(melodies))
|
return BuiltMelodyListResponse(melodies=melodies, total=len(melodies))
|
||||||
|
|
||||||
|
|
||||||
@router.get("/for-melody/{firestore_melody_id}")
|
@router.get("/for-melody/{firestore_melody_id}")
|
||||||
async def get_for_firestore_melody(
|
async def get_for_firestore_melody(
|
||||||
firestore_melody_id: str,
|
firestore_melody_id: str,
|
||||||
@@ -32,6 +33,14 @@ async def get_for_firestore_melody(
|
|||||||
return result.model_dump()
|
return result.model_dump()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/generate-builtin-list")
|
||||||
|
async def generate_builtin_list(
|
||||||
|
_user: TokenPayload = Depends(require_permission("melodies", "view")),
|
||||||
|
):
|
||||||
|
"""Generate a C++ header with PROGMEM arrays for all is_builtin archetypes."""
|
||||||
|
code = await service.generate_builtin_list()
|
||||||
|
return PlainTextResponse(content=code, media_type="text/plain")
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{melody_id}", response_model=BuiltMelodyInDB)
|
@router.get("/{melody_id}", response_model=BuiltMelodyInDB)
|
||||||
async def get_built_melody(
|
async def get_built_melody(
|
||||||
@@ -66,6 +75,15 @@ async def delete_built_melody(
|
|||||||
await service.delete_built_melody(melody_id)
|
await service.delete_built_melody(melody_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{melody_id}/toggle-builtin", response_model=BuiltMelodyInDB)
|
||||||
|
async def toggle_builtin(
|
||||||
|
melody_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
||||||
|
):
|
||||||
|
"""Toggle the is_builtin flag for an archetype."""
|
||||||
|
return await service.toggle_builtin(melody_id)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/{melody_id}/build-binary", response_model=BuiltMelodyInDB)
|
@router.post("/{melody_id}/build-binary", response_model=BuiltMelodyInDB)
|
||||||
async def build_binary(
|
async def build_binary(
|
||||||
melody_id: str,
|
melody_id: str,
|
||||||
|
|||||||
@@ -7,11 +7,12 @@ from typing import List, Optional
|
|||||||
from builder import database as db
|
from builder import database as db
|
||||||
from builder.models import BuiltMelodyCreate, BuiltMelodyUpdate, BuiltMelodyInDB
|
from builder.models import BuiltMelodyCreate, BuiltMelodyUpdate, BuiltMelodyInDB
|
||||||
from fastapi import HTTPException
|
from fastapi import HTTPException
|
||||||
|
from config import settings
|
||||||
|
|
||||||
logger = logging.getLogger("builder.service")
|
logger = logging.getLogger("builder.service")
|
||||||
|
|
||||||
# Storage directory for built .bsm files
|
# Storage directory for built .bsm files — configurable via BUILT_MELODIES_STORAGE_PATH env var
|
||||||
STORAGE_DIR = Path(__file__).parent.parent / "storage" / "built_melodies"
|
STORAGE_DIR = Path(settings.built_melodies_storage_path)
|
||||||
|
|
||||||
|
|
||||||
def _ensure_storage_dir():
|
def _ensure_storage_dir():
|
||||||
@@ -31,6 +32,7 @@ def _row_to_built_melody(row: dict) -> BuiltMelodyInDB:
|
|||||||
name=row["name"],
|
name=row["name"],
|
||||||
pid=row["pid"],
|
pid=row["pid"],
|
||||||
steps=row["steps"],
|
steps=row["steps"],
|
||||||
|
is_builtin=row.get("is_builtin", False),
|
||||||
binary_path=binary_path,
|
binary_path=binary_path,
|
||||||
binary_url=binary_url,
|
binary_url=binary_url,
|
||||||
progmem_code=row.get("progmem_code"),
|
progmem_code=row.get("progmem_code"),
|
||||||
@@ -150,8 +152,12 @@ async def create_built_melody(data: BuiltMelodyCreate) -> BuiltMelodyInDB:
|
|||||||
name=data.name,
|
name=data.name,
|
||||||
pid=data.pid,
|
pid=data.pid,
|
||||||
steps=data.steps,
|
steps=data.steps,
|
||||||
|
is_builtin=data.is_builtin,
|
||||||
)
|
)
|
||||||
return await get_built_melody(melody_id)
|
# Auto-build binary and builtin code on creation
|
||||||
|
result = await get_built_melody(melody_id)
|
||||||
|
result = await _do_build(melody_id)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
async def update_built_melody(melody_id: str, data: BuiltMelodyUpdate) -> BuiltMelodyInDB:
|
async def update_built_melody(melody_id: str, data: BuiltMelodyUpdate) -> BuiltMelodyInDB:
|
||||||
@@ -162,11 +168,22 @@ async def update_built_melody(melody_id: str, data: BuiltMelodyUpdate) -> BuiltM
|
|||||||
new_name = data.name if data.name is not None else row["name"]
|
new_name = data.name if data.name is not None else row["name"]
|
||||||
new_pid = data.pid if data.pid is not None else row["pid"]
|
new_pid = data.pid if data.pid is not None else row["pid"]
|
||||||
new_steps = data.steps if data.steps is not None else row["steps"]
|
new_steps = data.steps if data.steps is not None else row["steps"]
|
||||||
|
new_is_builtin = data.is_builtin if data.is_builtin is not None else row.get("is_builtin", False)
|
||||||
|
|
||||||
await _check_unique(new_name, new_pid or "", exclude_id=melody_id)
|
await _check_unique(new_name, new_pid or "", exclude_id=melody_id)
|
||||||
|
|
||||||
await db.update_built_melody(melody_id, name=new_name, pid=new_pid, steps=new_steps)
|
steps_changed = (data.steps is not None) and (data.steps != row["steps"])
|
||||||
return await get_built_melody(melody_id)
|
|
||||||
|
await db.update_built_melody(melody_id, name=new_name, pid=new_pid, steps=new_steps, is_builtin=new_is_builtin)
|
||||||
|
|
||||||
|
# If steps changed, flag all assigned melodies as outdated, then rebuild
|
||||||
|
if steps_changed:
|
||||||
|
assigned_ids = row.get("assigned_melody_ids", [])
|
||||||
|
if assigned_ids:
|
||||||
|
await _flag_melodies_outdated(assigned_ids, True)
|
||||||
|
|
||||||
|
# Auto-rebuild binary and builtin code on every save
|
||||||
|
return await _do_build(melody_id)
|
||||||
|
|
||||||
|
|
||||||
async def delete_built_melody(melody_id: str) -> None:
|
async def delete_built_melody(melody_id: str) -> None:
|
||||||
@@ -174,6 +191,11 @@ async def delete_built_melody(melody_id: str) -> None:
|
|||||||
if not row:
|
if not row:
|
||||||
raise HTTPException(status_code=404, detail=f"Built melody '{melody_id}' not found")
|
raise HTTPException(status_code=404, detail=f"Built melody '{melody_id}' not found")
|
||||||
|
|
||||||
|
# Flag all assigned melodies as outdated before deleting
|
||||||
|
assigned_ids = row.get("assigned_melody_ids", [])
|
||||||
|
if assigned_ids:
|
||||||
|
await _flag_melodies_outdated(assigned_ids, True)
|
||||||
|
|
||||||
# Delete the .bsm file if it exists
|
# Delete the .bsm file if it exists
|
||||||
if row.get("binary_path"):
|
if row.get("binary_path"):
|
||||||
bsm_path = Path(row["binary_path"])
|
bsm_path = Path(row["binary_path"])
|
||||||
@@ -183,10 +205,26 @@ async def delete_built_melody(melody_id: str) -> None:
|
|||||||
await db.delete_built_melody(melody_id)
|
await db.delete_built_melody(melody_id)
|
||||||
|
|
||||||
|
|
||||||
|
async def toggle_builtin(melody_id: str) -> BuiltMelodyInDB:
|
||||||
|
"""Toggle the is_builtin flag for an archetype."""
|
||||||
|
row = await db.get_built_melody(melody_id)
|
||||||
|
if not row:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Built melody '{melody_id}' not found")
|
||||||
|
new_value = not row.get("is_builtin", False)
|
||||||
|
await db.update_builtin_flag(melody_id, new_value)
|
||||||
|
return await get_built_melody(melody_id)
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Build Actions
|
# Build Actions
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
||||||
|
async def _do_build(melody_id: str) -> BuiltMelodyInDB:
|
||||||
|
"""Internal: build both binary and PROGMEM code, return updated record."""
|
||||||
|
await build_binary(melody_id)
|
||||||
|
return await build_builtin_code(melody_id)
|
||||||
|
|
||||||
|
|
||||||
async def build_binary(melody_id: str) -> BuiltMelodyInDB:
|
async def build_binary(melody_id: str) -> BuiltMelodyInDB:
|
||||||
"""Parse steps and write a .bsm binary file to storage."""
|
"""Parse steps and write a .bsm binary file to storage."""
|
||||||
row = await db.get_built_melody(melody_id)
|
row = await db.get_built_melody(melody_id)
|
||||||
@@ -235,6 +273,48 @@ async def get_binary_path(melody_id: str) -> Optional[Path]:
|
|||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
async def generate_builtin_list() -> str:
|
||||||
|
"""Generate a C++ header with PROGMEM arrays for all is_builtin archetypes."""
|
||||||
|
rows = await db.list_built_melodies()
|
||||||
|
builtin_rows = [r for r in rows if r.get("is_builtin")]
|
||||||
|
|
||||||
|
if not builtin_rows:
|
||||||
|
return "// No built-in archetypes defined.\n"
|
||||||
|
|
||||||
|
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
parts = [
|
||||||
|
f"// Auto-generated Built-in Archetype List",
|
||||||
|
f"// Generated: {timestamp}",
|
||||||
|
f"// Total built-ins: {len(builtin_rows)}",
|
||||||
|
"",
|
||||||
|
"#pragma once",
|
||||||
|
"#include <avr/pgmspace.h>",
|
||||||
|
"",
|
||||||
|
]
|
||||||
|
|
||||||
|
entry_refs = []
|
||||||
|
for row in builtin_rows:
|
||||||
|
values = steps_string_to_values(row["steps"])
|
||||||
|
array_name = f"melody_builtin_{row['name'].lower().replace(' ', '_')}"
|
||||||
|
display_name = row["name"].replace("_", " ").title()
|
||||||
|
pid = row.get("pid") or f"builtin_{row['name'].lower()}"
|
||||||
|
|
||||||
|
parts.append(f"// {display_name} | PID: {pid} | Steps: {len(values)}")
|
||||||
|
parts.append(format_melody_array(row["name"].lower().replace(" ", "_"), values))
|
||||||
|
parts.append("")
|
||||||
|
entry_refs.append((display_name, pid, array_name, len(values)))
|
||||||
|
|
||||||
|
# Generate MELODY_LIBRARY array
|
||||||
|
parts.append("// --- MELODY_LIBRARY entries ---")
|
||||||
|
parts.append("// Add these to your firmware's MELODY_LIBRARY[] array:")
|
||||||
|
parts.append("// {")
|
||||||
|
for display_name, pid, array_name, step_count in entry_refs:
|
||||||
|
parts.append(f'// {{ "{display_name}", "{pid}", {array_name}, {step_count} }},')
|
||||||
|
parts.append("// };")
|
||||||
|
|
||||||
|
return "\n".join(parts)
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Assignment
|
# Assignment
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
@@ -250,6 +330,9 @@ async def assign_to_melody(built_id: str, firestore_melody_id: str) -> BuiltMelo
|
|||||||
assigned.append(firestore_melody_id)
|
assigned.append(firestore_melody_id)
|
||||||
await db.update_assigned_melody_ids(built_id, assigned)
|
await db.update_assigned_melody_ids(built_id, assigned)
|
||||||
|
|
||||||
|
# Clear outdated flag on the melody being assigned
|
||||||
|
await _flag_melodies_outdated([firestore_melody_id], False)
|
||||||
|
|
||||||
return await get_built_melody(built_id)
|
return await get_built_melody(built_id)
|
||||||
|
|
||||||
|
|
||||||
@@ -261,6 +344,10 @@ async def unassign_from_melody(built_id: str, firestore_melody_id: str) -> Built
|
|||||||
|
|
||||||
assigned = [mid for mid in row.get("assigned_melody_ids", []) if mid != firestore_melody_id]
|
assigned = [mid for mid in row.get("assigned_melody_ids", []) if mid != firestore_melody_id]
|
||||||
await db.update_assigned_melody_ids(built_id, assigned)
|
await db.update_assigned_melody_ids(built_id, assigned)
|
||||||
|
|
||||||
|
# Flag the melody as outdated since it no longer has an archetype
|
||||||
|
await _flag_melodies_outdated([firestore_melody_id], True)
|
||||||
|
|
||||||
return await get_built_melody(built_id)
|
return await get_built_melody(built_id)
|
||||||
|
|
||||||
|
|
||||||
@@ -271,3 +358,48 @@ async def get_built_melody_for_firestore_id(firestore_melody_id: str) -> Optiona
|
|||||||
if firestore_melody_id in row.get("assigned_melody_ids", []):
|
if firestore_melody_id in row.get("assigned_melody_ids", []):
|
||||||
return _row_to_built_melody(row)
|
return _row_to_built_melody(row)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Outdated Flag Helpers
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
async def _flag_melodies_outdated(melody_ids: List[str], outdated: bool) -> None:
|
||||||
|
"""Set or clear the outdated_archetype flag on a list of Firestore melody IDs.
|
||||||
|
|
||||||
|
This updates both SQLite (melody_drafts) and Firestore (published melodies).
|
||||||
|
We import inline to avoid circular imports.
|
||||||
|
"""
|
||||||
|
if not melody_ids:
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
from melodies import database as melody_db
|
||||||
|
from shared.firebase import get_db as get_firestore
|
||||||
|
except ImportError:
|
||||||
|
logger.warning("Could not import melody/firebase modules — skipping outdated flag update")
|
||||||
|
return
|
||||||
|
|
||||||
|
firestore_db = get_firestore()
|
||||||
|
|
||||||
|
for melody_id in melody_ids:
|
||||||
|
try:
|
||||||
|
row = await melody_db.get_melody(melody_id)
|
||||||
|
if not row:
|
||||||
|
continue
|
||||||
|
|
||||||
|
data = row["data"]
|
||||||
|
info = dict(data.get("information", {}))
|
||||||
|
info["outdated_archetype"] = outdated
|
||||||
|
data["information"] = info
|
||||||
|
|
||||||
|
await melody_db.update_melody(melody_id, data)
|
||||||
|
|
||||||
|
# If published, also update Firestore
|
||||||
|
if row.get("status") == "published":
|
||||||
|
doc_ref = firestore_db.collection("melodies").document(melody_id)
|
||||||
|
doc_ref.update({"information.outdated_archetype": outdated})
|
||||||
|
|
||||||
|
logger.info(f"Set outdated_archetype={outdated} on melody {melody_id}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to set outdated flag on melody {melody_id}: {e}")
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
from pydantic_settings import BaseSettings
|
from pydantic_settings import BaseSettings
|
||||||
from typing import List
|
from typing import List, Dict, Any
|
||||||
import json
|
import json
|
||||||
|
|
||||||
|
|
||||||
@@ -18,20 +18,67 @@ class Settings(BaseSettings):
|
|||||||
mqtt_broker_port: int = 1883
|
mqtt_broker_port: int = 1883
|
||||||
mqtt_admin_username: str = "admin"
|
mqtt_admin_username: str = "admin"
|
||||||
mqtt_admin_password: str = ""
|
mqtt_admin_password: str = ""
|
||||||
|
mqtt_secret: str = "change-me-in-production"
|
||||||
mosquitto_password_file: str = "/etc/mosquitto/passwd"
|
mosquitto_password_file: str = "/etc/mosquitto/passwd"
|
||||||
|
mqtt_client_id: str = "bellsystems-admin-panel"
|
||||||
|
|
||||||
# SQLite (MQTT data storage)
|
# SQLite (local application database)
|
||||||
sqlite_db_path: str = "./mqtt_data.db"
|
sqlite_db_path: str = "./data/database.db"
|
||||||
mqtt_data_retention_days: int = 90
|
mqtt_data_retention_days: int = 90
|
||||||
|
|
||||||
|
# Local file storage
|
||||||
|
built_melodies_storage_path: str = "./storage/built_melodies"
|
||||||
|
firmware_storage_path: str = "./storage/firmware"
|
||||||
|
flash_assets_storage_path: str = "./storage/flash_assets"
|
||||||
|
|
||||||
|
# Email (Resend)
|
||||||
|
resend_api_key: str = "re_placeholder_change_me"
|
||||||
|
email_from: str = "noreply@yourdomain.com"
|
||||||
|
|
||||||
# App
|
# App
|
||||||
backend_cors_origins: str = '["http://localhost:5173"]'
|
backend_cors_origins: str = '["http://localhost:5173"]'
|
||||||
debug: bool = True
|
debug: bool = True
|
||||||
|
|
||||||
|
# Nextcloud WebDAV
|
||||||
|
nextcloud_url: str = ""
|
||||||
|
nextcloud_username: str = "" # WebDAV login & URL path username
|
||||||
|
nextcloud_password: str = "" # Use an app password for better security
|
||||||
|
nextcloud_dav_user: str = "" # Override URL path username if different from login
|
||||||
|
nextcloud_base_path: str = "BellSystems"
|
||||||
|
|
||||||
|
# IMAP/SMTP Email
|
||||||
|
imap_host: str = ""
|
||||||
|
imap_port: int = 993
|
||||||
|
imap_username: str = ""
|
||||||
|
imap_password: str = ""
|
||||||
|
imap_use_ssl: bool = True
|
||||||
|
smtp_host: str = ""
|
||||||
|
smtp_port: int = 587
|
||||||
|
smtp_username: str = ""
|
||||||
|
smtp_password: str = ""
|
||||||
|
smtp_use_tls: bool = True
|
||||||
|
email_sync_interval_minutes: int = 15
|
||||||
|
# Multi-mailbox config (JSON array). If empty, legacy single-account IMAP/SMTP is used.
|
||||||
|
# Example item:
|
||||||
|
# {"key":"sales","label":"Sales","email":"sales@bellsystems.gr","imap_host":"...","imap_username":"...","imap_password":"...","smtp_host":"...","smtp_username":"...","smtp_password":"...","sync_inbound":true,"allow_send":true}
|
||||||
|
mail_accounts_json: str = "[]"
|
||||||
|
|
||||||
|
# Auto-deploy (Gitea webhook)
|
||||||
|
deploy_secret: str = ""
|
||||||
|
deploy_project_path: str = "/app"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def cors_origins(self) -> List[str]:
|
def cors_origins(self) -> List[str]:
|
||||||
return json.loads(self.backend_cors_origins)
|
return json.loads(self.backend_cors_origins)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def mail_accounts(self) -> List[Dict[str, Any]]:
|
||||||
|
try:
|
||||||
|
raw = json.loads(self.mail_accounts_json or "[]")
|
||||||
|
return raw if isinstance(raw, list) else []
|
||||||
|
except Exception:
|
||||||
|
return []
|
||||||
|
|
||||||
model_config = {"env_file": ".env", "extra": "ignore"}
|
model_config = {"env_file": ".env", "extra": "ignore"}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
0
backend/crm/__init__.py
Normal file
417
backend/crm/comms_router.py
Normal file
@@ -0,0 +1,417 @@
|
|||||||
|
import base64
|
||||||
|
import json
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Query, Form, File, UploadFile
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from config import settings
|
||||||
|
from crm.models import CommCreate, CommUpdate, CommInDB, CommListResponse, MediaCreate, MediaDirection
|
||||||
|
from crm import service
|
||||||
|
from crm import email_sync
|
||||||
|
from crm.mail_accounts import get_mail_accounts
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/crm/comms", tags=["crm-comms"])
|
||||||
|
|
||||||
|
|
||||||
|
class EmailSendResponse(BaseModel):
|
||||||
|
entry: dict
|
||||||
|
|
||||||
|
|
||||||
|
class EmailSyncResponse(BaseModel):
|
||||||
|
new_count: int
|
||||||
|
|
||||||
|
|
||||||
|
class MailListResponse(BaseModel):
|
||||||
|
entries: list
|
||||||
|
total: int
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/all", response_model=CommListResponse)
|
||||||
|
async def list_all_comms(
|
||||||
|
type: Optional[str] = Query(None),
|
||||||
|
direction: Optional[str] = Query(None),
|
||||||
|
limit: int = Query(200, le=500),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
entries = await service.list_all_comms(type=type, direction=direction, limit=limit)
|
||||||
|
return CommListResponse(entries=entries, total=len(entries))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", response_model=CommListResponse)
|
||||||
|
async def list_comms(
|
||||||
|
customer_id: str = Query(...),
|
||||||
|
type: Optional[str] = Query(None),
|
||||||
|
direction: Optional[str] = Query(None),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
entries = await service.list_comms(customer_id=customer_id, type=type, direction=direction)
|
||||||
|
return CommListResponse(entries=entries, total=len(entries))
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("", response_model=CommInDB, status_code=201)
|
||||||
|
async def create_comm(
|
||||||
|
body: CommCreate,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return await service.create_comm(body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/email/all", response_model=MailListResponse)
|
||||||
|
async def list_all_emails(
|
||||||
|
direction: Optional[str] = Query(None),
|
||||||
|
customers_only: bool = Query(False),
|
||||||
|
mailbox: Optional[str] = Query(None, description="sales|support|both|all or account key"),
|
||||||
|
limit: int = Query(500, le=1000),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
"""Return all email comms (all senders + unmatched), for the Mail page."""
|
||||||
|
selected_accounts = None
|
||||||
|
if mailbox and mailbox not in {"all", "both"}:
|
||||||
|
if mailbox == "sales":
|
||||||
|
selected_accounts = ["sales"]
|
||||||
|
elif mailbox == "support":
|
||||||
|
selected_accounts = ["support"]
|
||||||
|
else:
|
||||||
|
selected_accounts = [mailbox]
|
||||||
|
entries = await service.list_all_emails(
|
||||||
|
direction=direction,
|
||||||
|
customers_only=customers_only,
|
||||||
|
mail_accounts=selected_accounts,
|
||||||
|
limit=limit,
|
||||||
|
)
|
||||||
|
return MailListResponse(entries=entries, total=len(entries))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/email/accounts")
|
||||||
|
async def list_mail_accounts(
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
accounts = get_mail_accounts()
|
||||||
|
return {
|
||||||
|
"accounts": [
|
||||||
|
{
|
||||||
|
"key": a["key"],
|
||||||
|
"label": a["label"],
|
||||||
|
"email": a["email"],
|
||||||
|
"sync_inbound": bool(a.get("sync_inbound")),
|
||||||
|
"allow_send": bool(a.get("allow_send")),
|
||||||
|
}
|
||||||
|
for a in accounts
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/email/check")
|
||||||
|
async def check_new_emails(
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
"""Lightweight check: returns how many emails are on the server vs. stored locally."""
|
||||||
|
return await email_sync.check_new_emails()
|
||||||
|
|
||||||
|
|
||||||
|
# Email endpoints — must be before /{comm_id} wildcard routes
@router.post("/email/send", response_model=EmailSendResponse)
async def send_email_endpoint(
    customer_id: Optional[str] = Form(None),
    from_account: Optional[str] = Form(None),
    to: str = Form(...),
    subject: str = Form(...),
    body: str = Form(...),
    body_html: str = Form(""),
    cc: str = Form("[]"),  # JSON-encoded list of strings
    files: List[UploadFile] = File(default=[]),
    user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Send an email via the configured SMTP account and log it as a comm entry.

    Accepts a multipart form (so file attachments can ride along).
    Returns 503 when no mail account is configured, 400 when the send
    itself is rejected (RuntimeError from crm.email_sync.send_email).
    """
    if not get_mail_accounts():
        raise HTTPException(status_code=503, detail="SMTP not configured")
    try:
        cc_list: List[str] = json.loads(cc) if cc else []
    except Exception:
        # Malformed cc JSON is treated as "no cc" rather than failing the send.
        cc_list = []

    # Read all uploaded files into memory
    file_attachments = []
    for f in files:
        content = await f.read()
        mime_type = f.content_type or "application/octet-stream"
        file_attachments.append((f.filename, content, mime_type))

    from crm.email_sync import send_email
    try:
        entry = await send_email(
            customer_id=customer_id or None,
            from_account=from_account,
            to=to,
            subject=subject,
            body=body,
            body_html=body_html,
            cc=cc_list,
            sent_by=user.name or user.sub,
            file_attachments=file_attachments if file_attachments else None,
        )
    except RuntimeError as e:
        raise HTTPException(status_code=400, detail=str(e))
    return EmailSendResponse(entry=entry)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/email/sync", response_model=EmailSyncResponse)
|
||||||
|
async def sync_email_endpoint(
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
if not get_mail_accounts():
|
||||||
|
raise HTTPException(status_code=503, detail="IMAP not configured")
|
||||||
|
from crm.email_sync import sync_emails
|
||||||
|
new_count = await sync_emails()
|
||||||
|
return EmailSyncResponse(new_count=new_count)
|
||||||
|
|
||||||
|
|
||||||
|
class SaveInlineRequest(BaseModel):
    # Payload for saving an inline email image (data-URI) to Nextcloud.
    data_uri: str                       # full "data:image/...;base64,..." URI (or bare base64)
    filename: str                       # name to store the file under
    subfolder: str = "received_media"   # subfolder inside the customer's NC folder
    mime_type: Optional[str] = None     # fallback MIME type if the URI has no header
|
||||||
|
|
||||||
|
|
||||||
|
async def _resolve_customer_folder(customer_id: str) -> str:
    """Return the Nextcloud folder_id for a customer (falls back to customer_id).

    Raises HTTPException(404) when the customer document does not exist.
    """
    from shared.firebase import get_db as get_firestore
    firestore_db = get_firestore()
    doc = firestore_db.collection("crm_customers").document(customer_id).get()
    if not doc.exists:
        raise HTTPException(status_code=404, detail="Customer not found")
    data = doc.to_dict()
    # Older customers may predate folder_id; their id doubles as the folder name.
    return data.get("folder_id") or customer_id
|
||||||
|
|
||||||
|
|
||||||
|
async def _upload_to_nc(folder_id: str, subfolder: str, filename: str,
                        content: bytes, mime_type: str, customer_id: str,
                        uploaded_by: str, tags: list[str]) -> dict:
    """Upload raw bytes into a customer's Nextcloud subfolder and record a media entry.

    Returns {"ok": True, "media_id": ..., "nextcloud_path": ...}.
    """
    from crm import nextcloud
    target_folder = f"customers/{folder_id}/{subfolder}"
    # FIX: the path previously used a hard-coded placeholder instead of the
    # uploaded file's name, so every upload landed on the same remote path
    # while the media record advertised the real filename.
    file_path = f"{target_folder}/{filename}"
    await nextcloud.ensure_folder(target_folder)
    await nextcloud.upload_file(file_path, content, mime_type)
    media = await service.create_media(MediaCreate(
        customer_id=customer_id,
        filename=filename,
        nextcloud_path=file_path,
        mime_type=mime_type,
        direction=MediaDirection.received,
        tags=tags,
        uploaded_by=uploaded_by,
    ))
    return {"ok": True, "media_id": media.id, "nextcloud_path": file_path}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/email/{comm_id}/save-inline")
|
||||||
|
async def save_email_inline_image(
|
||||||
|
comm_id: str,
|
||||||
|
body: SaveInlineRequest,
|
||||||
|
user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
"""Save an inline image (data-URI from email HTML body) to Nextcloud."""
|
||||||
|
comm = await service.get_comm(comm_id)
|
||||||
|
customer_id = comm.customer_id
|
||||||
|
if not customer_id:
|
||||||
|
raise HTTPException(status_code=400, detail="This email is not linked to a customer")
|
||||||
|
|
||||||
|
folder_id = await _resolve_customer_folder(customer_id)
|
||||||
|
|
||||||
|
# Parse data URI
|
||||||
|
data_uri = body.data_uri
|
||||||
|
mime_type = body.mime_type or "image/png"
|
||||||
|
if "," in data_uri:
|
||||||
|
header, encoded = data_uri.split(",", 1)
|
||||||
|
try:
|
||||||
|
mime_type = header.split(":")[1].split(";")[0]
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
encoded = data_uri
|
||||||
|
try:
|
||||||
|
content = base64.b64decode(encoded)
|
||||||
|
except Exception:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid base64 data")
|
||||||
|
|
||||||
|
return await _upload_to_nc(
|
||||||
|
folder_id, body.subfolder, body.filename,
|
||||||
|
content, mime_type, customer_id,
|
||||||
|
user.name or user.sub, ["email-inline-image"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/email/{comm_id}/save-attachment/{attachment_index}")
|
||||||
|
async def save_email_attachment(
|
||||||
|
comm_id: str,
|
||||||
|
attachment_index: int,
|
||||||
|
filename: str = Form(...),
|
||||||
|
subfolder: str = Form("received_media"),
|
||||||
|
user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Re-fetch a specific attachment from IMAP (by index in the email's attachment list)
|
||||||
|
and save it to the customer's Nextcloud media folder.
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
comm = await service.get_comm(comm_id)
|
||||||
|
customer_id = comm.customer_id
|
||||||
|
if not customer_id:
|
||||||
|
raise HTTPException(status_code=400, detail="This email is not linked to a customer")
|
||||||
|
|
||||||
|
ext_message_id = comm.ext_message_id
|
||||||
|
if not ext_message_id:
|
||||||
|
raise HTTPException(status_code=400, detail="No message ID stored for this email")
|
||||||
|
|
||||||
|
attachments_meta = comm.attachments or []
|
||||||
|
if attachment_index < 0 or attachment_index >= len(attachments_meta):
|
||||||
|
raise HTTPException(status_code=400, detail="Attachment index out of range")
|
||||||
|
|
||||||
|
att_meta = attachments_meta[attachment_index]
|
||||||
|
mime_type = att_meta.content_type or "application/octet-stream"
|
||||||
|
from crm.mail_accounts import account_by_key, account_by_email
|
||||||
|
account = account_by_key(comm.mail_account) or account_by_email(comm.from_addr)
|
||||||
|
if not account:
|
||||||
|
raise HTTPException(status_code=400, detail="Email account config not found for this message")
|
||||||
|
|
||||||
|
# Re-fetch from IMAP in executor
|
||||||
|
def _fetch_attachment():
|
||||||
|
import imaplib, email as _email
|
||||||
|
if account.get("imap_use_ssl"):
|
||||||
|
imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
|
||||||
|
else:
|
||||||
|
imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
|
||||||
|
imap.login(account["imap_username"], account["imap_password"])
|
||||||
|
imap.select(account.get("imap_inbox", "INBOX"))
|
||||||
|
|
||||||
|
# Search by Message-ID header
|
||||||
|
_, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
|
||||||
|
uids = data[0].split() if data[0] else []
|
||||||
|
if not uids:
|
||||||
|
raise ValueError(f"Message not found on IMAP server: {ext_message_id}")
|
||||||
|
|
||||||
|
_, msg_data = imap.fetch(uids[0], "(RFC822)")
|
||||||
|
raw = msg_data[0][1]
|
||||||
|
msg = _email.message_from_bytes(raw)
|
||||||
|
imap.logout()
|
||||||
|
|
||||||
|
# Walk attachments in order — find the one at attachment_index
|
||||||
|
found_idx = 0
|
||||||
|
for part in msg.walk():
|
||||||
|
cd = str(part.get("Content-Disposition", ""))
|
||||||
|
if "attachment" not in cd:
|
||||||
|
continue
|
||||||
|
if found_idx == attachment_index:
|
||||||
|
payload = part.get_payload(decode=True)
|
||||||
|
if payload is None:
|
||||||
|
raise ValueError("Attachment payload is empty")
|
||||||
|
return payload
|
||||||
|
found_idx += 1
|
||||||
|
|
||||||
|
raise ValueError(f"Attachment index {attachment_index} not found in message")
|
||||||
|
|
||||||
|
loop = asyncio.get_event_loop()
|
||||||
|
try:
|
||||||
|
content = await loop.run_in_executor(None, _fetch_attachment)
|
||||||
|
except ValueError as e:
|
||||||
|
raise HTTPException(status_code=404, detail=str(e))
|
||||||
|
except Exception as e:
|
||||||
|
raise HTTPException(status_code=502, detail=f"IMAP fetch failed: {e}")
|
||||||
|
|
||||||
|
folder_id = await _resolve_customer_folder(customer_id)
|
||||||
|
return await _upload_to_nc(
|
||||||
|
folder_id, subfolder, filename,
|
||||||
|
content, mime_type, customer_id,
|
||||||
|
user.name or user.sub, ["email-attachment"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BulkDeleteRequest(BaseModel):
    # Comm-log entry ids to delete in one request.
    ids: List[str]
|
||||||
|
|
||||||
|
|
||||||
|
class ToggleImportantRequest(BaseModel):
    # Desired "important" flag state for a comm entry.
    important: bool
|
||||||
|
|
||||||
|
|
||||||
|
class ToggleReadRequest(BaseModel):
    # Desired read/unread state for a comm entry.
    read: bool
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/bulk-delete", status_code=200)
|
||||||
|
async def bulk_delete_comms(
|
||||||
|
body: BulkDeleteRequest,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
# Try remote IMAP delete for email rows first (best-effort), then local delete.
|
||||||
|
for comm_id in body.ids:
|
||||||
|
try:
|
||||||
|
comm = await service.get_comm(comm_id)
|
||||||
|
if comm.type == "email" and comm.ext_message_id:
|
||||||
|
await email_sync.delete_remote_email(
|
||||||
|
comm.ext_message_id,
|
||||||
|
comm.mail_account,
|
||||||
|
comm.from_addr,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
# Keep delete resilient; local delete still proceeds.
|
||||||
|
pass
|
||||||
|
count = await service.delete_comms_bulk(body.ids)
|
||||||
|
return {"deleted": count}
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{comm_id}/important", response_model=CommInDB)
|
||||||
|
async def set_comm_important(
|
||||||
|
comm_id: str,
|
||||||
|
body: ToggleImportantRequest,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return await service.set_comm_important(comm_id, body.important)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{comm_id}/read", response_model=CommInDB)
|
||||||
|
async def set_comm_read(
|
||||||
|
comm_id: str,
|
||||||
|
body: ToggleReadRequest,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
comm = await service.get_comm(comm_id)
|
||||||
|
if comm.type == "email" and comm.ext_message_id:
|
||||||
|
await email_sync.set_remote_read(
|
||||||
|
comm.ext_message_id,
|
||||||
|
comm.mail_account,
|
||||||
|
comm.from_addr,
|
||||||
|
body.read,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return await service.set_comm_read(comm_id, body.read)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{comm_id}", response_model=CommInDB)
|
||||||
|
async def update_comm(
|
||||||
|
comm_id: str,
|
||||||
|
body: CommUpdate,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return await service.update_comm(comm_id, body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{comm_id}", status_code=204)
|
||||||
|
async def delete_comm(
|
||||||
|
comm_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
comm = await service.get_comm(comm_id)
|
||||||
|
if comm.type == "email" and comm.ext_message_id:
|
||||||
|
await email_sync.delete_remote_email(
|
||||||
|
comm.ext_message_id,
|
||||||
|
comm.mail_account,
|
||||||
|
comm.from_addr,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
await service.delete_comm(comm_id)
|
||||||
245
backend/crm/customers_router.py
Normal file
@@ -0,0 +1,245 @@
|
|||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from fastapi import APIRouter, Depends, Query, BackgroundTasks, Body
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from crm.models import CustomerCreate, CustomerUpdate, CustomerInDB, CustomerListResponse, TransactionEntry
|
||||||
|
from crm import service, nextcloud
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/crm/customers", tags=["crm-customers"])
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", response_model=CustomerListResponse)
|
||||||
|
async def list_customers(
|
||||||
|
search: Optional[str] = Query(None),
|
||||||
|
tag: Optional[str] = Query(None),
|
||||||
|
sort: Optional[str] = Query(None),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
customers = service.list_customers(search=search, tag=tag, sort=sort)
|
||||||
|
if sort == "latest_comm":
|
||||||
|
customers = await service.list_customers_sorted_by_latest_comm(customers)
|
||||||
|
return CustomerListResponse(customers=customers, total=len(customers))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/tags", response_model=list[str])
|
||||||
|
def list_tags(
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
return service.list_all_tags()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{customer_id}", response_model=CustomerInDB)
|
||||||
|
def get_customer(
|
||||||
|
customer_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
return service.get_customer(customer_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("", response_model=CustomerInDB, status_code=201)
|
||||||
|
async def create_customer(
|
||||||
|
body: CustomerCreate,
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
customer = service.create_customer(body)
|
||||||
|
if settings.nextcloud_url:
|
||||||
|
background_tasks.add_task(_init_nextcloud_folder, customer)
|
||||||
|
return customer
|
||||||
|
|
||||||
|
|
||||||
|
async def _init_nextcloud_folder(customer) -> None:
    """Best-effort creation of a customer's Nextcloud folder tree; never raises."""
    try:
        nc_path = service.get_customer_nc_path(customer)
        base = f"customers/{nc_path}"
        # Standard subfolder layout every customer gets.
        for sub in ("media", "documents", "sent", "received"):
            await nextcloud.ensure_folder(f"{base}/{sub}")
        await nextcloud.write_info_file(base, customer.name, customer.id)
    except Exception as e:
        # Runs in a background task — log and move on rather than crash the worker.
        logger.warning("Nextcloud folder init failed for customer %s: %s", customer.id, e)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{customer_id}", response_model=CustomerInDB)
|
||||||
|
def update_customer(
|
||||||
|
customer_id: str,
|
||||||
|
body: CustomerUpdate,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.update_customer(customer_id, body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{customer_id}", status_code=204)
|
||||||
|
async def delete_customer(
|
||||||
|
customer_id: str,
|
||||||
|
wipe_comms: bool = Query(False),
|
||||||
|
wipe_files: bool = Query(False),
|
||||||
|
wipe_nextcloud: bool = Query(False),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
customer = service.delete_customer(customer_id)
|
||||||
|
nc_path = service.get_customer_nc_path(customer)
|
||||||
|
|
||||||
|
if wipe_comms or wipe_nextcloud:
|
||||||
|
await service.delete_customer_comms(customer_id)
|
||||||
|
|
||||||
|
if wipe_files or wipe_nextcloud:
|
||||||
|
await service.delete_customer_media_entries(customer_id)
|
||||||
|
|
||||||
|
if settings.nextcloud_url:
|
||||||
|
folder = f"customers/{nc_path}"
|
||||||
|
if wipe_nextcloud:
|
||||||
|
try:
|
||||||
|
await nextcloud.delete_file(folder)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning("Could not delete NC folder for customer %s: %s", customer_id, e)
|
||||||
|
elif wipe_files:
|
||||||
|
stale_folder = f"customers/STALE_{nc_path}"
|
||||||
|
try:
|
||||||
|
await nextcloud.rename_folder(folder, stale_folder)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning("Could not rename NC folder for customer %s: %s", customer_id, e)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{customer_id}/last-comm-direction")
|
||||||
|
async def get_last_comm_direction(
|
||||||
|
customer_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||||
|
):
|
||||||
|
result = await service.get_last_comm_direction(customer_id)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
# ── Relationship Status ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.patch("/{customer_id}/relationship-status", response_model=CustomerInDB)
|
||||||
|
def update_relationship_status(
|
||||||
|
customer_id: str,
|
||||||
|
body: dict = Body(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.update_relationship_status(customer_id, body.get("status", ""))
|
||||||
|
|
||||||
|
|
||||||
|
# ── Technical Issues ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.post("/{customer_id}/technical-issues", response_model=CustomerInDB)
|
||||||
|
def add_technical_issue(
|
||||||
|
customer_id: str,
|
||||||
|
body: dict = Body(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.add_technical_issue(
|
||||||
|
customer_id,
|
||||||
|
note=body.get("note", ""),
|
||||||
|
opened_by=body.get("opened_by", ""),
|
||||||
|
date=body.get("date"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{customer_id}/technical-issues/{index}/resolve", response_model=CustomerInDB)
|
||||||
|
def resolve_technical_issue(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
body: dict = Body(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.resolve_technical_issue(customer_id, index, body.get("resolved_by", ""))
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{customer_id}/technical-issues/{index}", response_model=CustomerInDB)
|
||||||
|
def edit_technical_issue(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
body: dict = Body(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.edit_technical_issue(customer_id, index, body.get("note", ""), body.get("opened_date"))
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{customer_id}/technical-issues/{index}", response_model=CustomerInDB)
|
||||||
|
def delete_technical_issue(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.delete_technical_issue(customer_id, index)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Install Support ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.post("/{customer_id}/install-support", response_model=CustomerInDB)
|
||||||
|
def add_install_support(
|
||||||
|
customer_id: str,
|
||||||
|
body: dict = Body(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.add_install_support(
|
||||||
|
customer_id,
|
||||||
|
note=body.get("note", ""),
|
||||||
|
opened_by=body.get("opened_by", ""),
|
||||||
|
date=body.get("date"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{customer_id}/install-support/{index}/resolve", response_model=CustomerInDB)
|
||||||
|
def resolve_install_support(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
body: dict = Body(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.resolve_install_support(customer_id, index, body.get("resolved_by", ""))
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{customer_id}/install-support/{index}", response_model=CustomerInDB)
|
||||||
|
def edit_install_support(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
body: dict = Body(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.edit_install_support(customer_id, index, body.get("note", ""), body.get("opened_date"))
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{customer_id}/install-support/{index}", response_model=CustomerInDB)
|
||||||
|
def delete_install_support(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.delete_install_support(customer_id, index)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Transactions ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.post("/{customer_id}/transactions", response_model=CustomerInDB)
|
||||||
|
def add_transaction(
|
||||||
|
customer_id: str,
|
||||||
|
body: TransactionEntry,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.add_transaction(customer_id, body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{customer_id}/transactions/{index}", response_model=CustomerInDB)
|
||||||
|
def update_transaction(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
body: TransactionEntry,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.update_transaction(customer_id, index, body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{customer_id}/transactions/{index}", response_model=CustomerInDB)
|
||||||
|
def delete_transaction(
|
||||||
|
customer_id: str,
|
||||||
|
index: int,
|
||||||
|
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||||
|
):
|
||||||
|
return service.delete_transaction(customer_id, index)
|
||||||
837
backend/crm/email_sync.py
Normal file
@@ -0,0 +1,837 @@
|
|||||||
|
"""
|
||||||
|
IMAP email sync and SMTP email send for CRM.
|
||||||
|
Uses only stdlib imaplib/smtplib — no extra dependencies.
|
||||||
|
Sync is run in an executor to avoid blocking the event loop.
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import base64
|
||||||
|
import email
|
||||||
|
import email.header
|
||||||
|
import email.utils
|
||||||
|
import html.parser
|
||||||
|
import imaplib
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
import smtplib
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from email.mime.base import MIMEBase
|
||||||
|
from email.mime.multipart import MIMEMultipart
|
||||||
|
from email.mime.text import MIMEText
|
||||||
|
from email import encoders
|
||||||
|
from typing import List, Optional, Tuple
|
||||||
|
|
||||||
|
from config import settings
|
||||||
|
import database as mqtt_db
|
||||||
|
from crm.mail_accounts import get_mail_accounts, account_by_key, account_by_email
|
||||||
|
|
||||||
|
logger = logging.getLogger("crm.email_sync")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _decode_header(raw: str) -> str:
|
||||||
|
"""Decode an RFC2047-encoded email header value."""
|
||||||
|
if not raw:
|
||||||
|
return ""
|
||||||
|
parts = email.header.decode_header(raw)
|
||||||
|
decoded = []
|
||||||
|
for part, enc in parts:
|
||||||
|
if isinstance(part, bytes):
|
||||||
|
decoded.append(part.decode(enc or "utf-8", errors="replace"))
|
||||||
|
else:
|
||||||
|
decoded.append(part)
|
||||||
|
return " ".join(decoded)
|
||||||
|
|
||||||
|
|
||||||
|
class _HTMLStripper(html.parser.HTMLParser):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__()
|
||||||
|
self._text = []
|
||||||
|
|
||||||
|
def handle_data(self, data):
|
||||||
|
self._text.append(data)
|
||||||
|
|
||||||
|
def get_text(self):
|
||||||
|
return " ".join(self._text)
|
||||||
|
|
||||||
|
|
||||||
|
def _strip_html(html_str: str) -> str:
    """Return the plain-text content of an HTML string (markup discarded)."""
    stripper = _HTMLStripper()
    stripper.feed(html_str)
    return stripper.get_text()
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_inline_data_images(html_body: str) -> tuple[str, list[tuple[str, bytes, str]]]:
|
||||||
|
"""Replace data-URI images in HTML with cid: references and return inline parts.
|
||||||
|
Returns: (new_html, [(cid, image_bytes, mime_type), ...])
|
||||||
|
"""
|
||||||
|
if not html_body:
|
||||||
|
return "", []
|
||||||
|
|
||||||
|
inline_parts: list[tuple[str, bytes, str]] = []
|
||||||
|
seen: dict[str, str] = {} # data-uri -> cid
|
||||||
|
|
||||||
|
src_pattern = re.compile(r"""src=(['"])(data:image/[^'"]+)\1""", re.IGNORECASE)
|
||||||
|
data_pattern = re.compile(r"^data:(image/[a-zA-Z0-9.+-]+);base64,(.+)$", re.IGNORECASE | re.DOTALL)
|
||||||
|
|
||||||
|
def _replace(match: re.Match) -> str:
|
||||||
|
quote = match.group(1)
|
||||||
|
data_uri = match.group(2)
|
||||||
|
|
||||||
|
if data_uri in seen:
|
||||||
|
cid = seen[data_uri]
|
||||||
|
return f"src={quote}cid:{cid}{quote}"
|
||||||
|
|
||||||
|
parsed = data_pattern.match(data_uri)
|
||||||
|
if not parsed:
|
||||||
|
return match.group(0)
|
||||||
|
|
||||||
|
mime_type = parsed.group(1).lower()
|
||||||
|
b64_data = parsed.group(2).strip()
|
||||||
|
try:
|
||||||
|
payload = base64.b64decode(b64_data, validate=False)
|
||||||
|
except Exception:
|
||||||
|
return match.group(0)
|
||||||
|
|
||||||
|
cid = f"inline-{uuid.uuid4().hex}"
|
||||||
|
seen[data_uri] = cid
|
||||||
|
inline_parts.append((cid, payload, mime_type))
|
||||||
|
return f"src={quote}cid:{cid}{quote}"
|
||||||
|
|
||||||
|
return src_pattern.sub(_replace, html_body), inline_parts
|
||||||
|
|
||||||
|
|
||||||
|
def _load_customer_email_map() -> dict[str, str]:
    """Build a lookup of customer email -> customer_id from Firestore."""
    from shared.firebase import get_db as get_firestore
    firestore_db = get_firestore()
    addr_to_customer: dict[str, str] = {}
    for doc in firestore_db.collection("crm_customers").stream():
        data = doc.to_dict() or {}
        for contact in (data.get("contacts") or []):
            # Only email-type contacts participate in matching; keys are
            # normalized to lowercase so lookups are case-insensitive.
            if contact.get("type") == "email" and contact.get("value"):
                addr_to_customer[str(contact["value"]).strip().lower()] = doc.id
    return addr_to_customer
|
||||||
|
|
||||||
|
|
||||||
|
def _get_body(msg: email.message.Message) -> tuple[str, str]:
    """Extract (plain_text, html_body) from an email message.

    Inline images (cid: references) are substituted with data-URIs so they
    render correctly in a sandboxed iframe without external requests.
    The first text/plain and first text/html parts encountered win.
    """
    import base64 as _b64
    plain = None
    html_body = None
    # Map Content-ID → data-URI for inline images
    cid_map: dict[str, str] = {}

    if msg.is_multipart():
        for part in msg.walk():
            ct = part.get_content_type()
            cd = str(part.get("Content-Disposition", ""))
            cid = part.get("Content-ID", "").strip().strip("<>")

            # Attachments are handled separately (see _get_attachments).
            if "attachment" in cd:
                continue

            if ct == "text/plain" and plain is None:
                raw = part.get_payload(decode=True)
                charset = part.get_content_charset() or "utf-8"
                plain = raw.decode(charset, errors="replace")
            elif ct == "text/html" and html_body is None:
                raw = part.get_payload(decode=True)
                charset = part.get_content_charset() or "utf-8"
                html_body = raw.decode(charset, errors="replace")
            elif ct.startswith("image/") and cid:
                raw = part.get_payload(decode=True)
                if raw:
                    b64 = _b64.b64encode(raw).decode("ascii")
                    cid_map[cid] = f"data:{ct};base64,{b64}"
    else:
        # Single-part message: one body, either plain or HTML.
        ct = msg.get_content_type()
        payload = msg.get_payload(decode=True)
        charset = msg.get_content_charset() or "utf-8"
        if payload:
            text = payload.decode(charset, errors="replace")
            if ct == "text/plain":
                plain = text
            elif ct == "text/html":
                html_body = text

    # Substitute cid: references with data-URIs
    if html_body and cid_map:
        for cid, data_uri in cid_map.items():
            html_body = html_body.replace(f"cid:{cid}", data_uri)

    # Fall back to stripped HTML when there is no text/plain part.
    plain_text = (plain or (html_body and _strip_html(html_body)) or "").strip()
    return plain_text, (html_body or "").strip()
|
||||||
|
|
||||||
|
|
||||||
|
def _get_attachments(msg: email.message.Message) -> list[dict]:
|
||||||
|
"""Extract attachment info (filename, content_type, size) without storing content."""
|
||||||
|
attachments = []
|
||||||
|
if msg.is_multipart():
|
||||||
|
for part in msg.walk():
|
||||||
|
cd = str(part.get("Content-Disposition", ""))
|
||||||
|
if "attachment" in cd:
|
||||||
|
filename = part.get_filename() or "attachment"
|
||||||
|
filename = _decode_header(filename)
|
||||||
|
ct = part.get_content_type() or "application/octet-stream"
|
||||||
|
payload = part.get_payload(decode=True)
|
||||||
|
size = len(payload) if payload else 0
|
||||||
|
attachments.append({"filename": filename, "content_type": ct, "size": size})
|
||||||
|
return attachments
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# IMAP sync (synchronous — called via run_in_executor)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _sync_account_emails_sync(account: dict) -> tuple[list[dict], bool]:
    """Fetch and parse every message in one account's inbox.

    Returns (messages, complete) — complete is False when the account is not
    configured or any message failed to parse. Connection-level errors
    (login/select/search) propagate to the caller; the IMAP connection is
    always released.
    """
    if not account.get("imap_host") or not account.get("imap_username") or not account.get("imap_password"):
        return [], False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))

    results = []
    complete = True
    try:
        imap.login(account["imap_username"], account["imap_password"])
        # readonly=True prevents marking messages as \Seen while syncing.
        imap.select(account.get("imap_inbox", "INBOX"), readonly=True)
        _, data = imap.search(None, "ALL")
        uids = data[0].split() if data[0] else []

        for uid in uids:
            try:
                _, msg_data = imap.fetch(uid, "(FLAGS RFC822)")
                # First tuple element carries the FLAGS metadata for the message.
                meta = msg_data[0][0] if msg_data and isinstance(msg_data[0], tuple) else b""
                raw = msg_data[0][1]
                msg = email.message_from_bytes(raw)
                message_id = msg.get("Message-ID", "").strip()
                from_addr = email.utils.parseaddr(msg.get("From", ""))[1]
                to_addrs_raw = msg.get("To", "")
                to_addrs = [a for _, a in email.utils.getaddresses([to_addrs_raw])]
                subject = _decode_header(msg.get("Subject", ""))
                date_str = msg.get("Date", "")
                try:
                    occurred_at = email.utils.parsedate_to_datetime(date_str).isoformat()
                except Exception:
                    # Unparseable Date header — fall back to "now".
                    occurred_at = datetime.now(timezone.utc).isoformat()
                is_read = b"\\Seen" in (meta or b"")
                try:
                    body, body_html = _get_body(msg)
                except Exception:
                    body, body_html = "", ""
                try:
                    file_attachments = _get_attachments(msg)
                except Exception:
                    file_attachments = []
                results.append({
                    "mail_account": account["key"],
                    "message_id": message_id,
                    "from_addr": from_addr,
                    "to_addrs": to_addrs,
                    "subject": subject,
                    "body": body,
                    "body_html": body_html,
                    "attachments": file_attachments,
                    "occurred_at": occurred_at,
                    "is_read": bool(is_read),
                })
            except Exception as e:
                # One bad message must not abort the whole sync.
                complete = False
                logger.warning(f"[EMAIL SYNC] Failed to parse message uid={uid} account={account['key']}: {e}")
    finally:
        # FIX: the original only logged out on the success path, leaking the
        # connection when login/select/search raised.
        try:
            imap.logout()
        except Exception:
            pass
    return results, complete
|
||||||
|
|
||||||
|
|
||||||
|
def _sync_emails_sync() -> tuple[list[dict], bool]:
    """Fetch inbound mail from every synced account.

    Returns (messages, complete) where ``complete`` is False when any
    account's fetch was only partially successful.
    """
    collected: list[dict] = []
    fully_fetched = True
    # Aliases frequently share one physical mailbox; fetch each
    # (host, port, user, folder) source only once.
    visited: set[tuple] = set()
    for account in get_mail_accounts():
        if not account.get("sync_inbound"):
            continue
        source_key = (
            (account.get("imap_host") or "").lower(),
            int(account.get("imap_port") or 0),
            (account.get("imap_username") or "").lower(),
            (account.get("imap_inbox") or "INBOX").upper(),
        )
        if source_key in visited:
            continue
        visited.add(source_key)
        batch, ok = _sync_account_emails_sync(account)
        collected.extend(batch)
        fully_fetched = fully_fetched and ok
    return collected, fully_fetched
|
||||||
|
|
||||||
|
|
||||||
|
async def sync_emails() -> int:
    """
    Pull emails from IMAP, match against CRM customers, store new ones.

    Also mirrors server-side read flags for known inbound mail and, after
    two consecutive "missing from server" syncs, mirrors remote deletions.
    Returns count of new entries created.
    """
    if not get_mail_accounts():
        return 0

    loop = asyncio.get_event_loop()
    try:
        messages, fetch_complete = await loop.run_in_executor(None, _sync_emails_sync)
    except Exception as e:
        logger.error(f"[EMAIL SYNC] IMAP connect/fetch failed: {e}")
        raise

    db = await mqtt_db.get_db()

    # Load all customer email contacts into a flat lookup: email -> customer_id
    addr_to_customer = _load_customer_email_map()

    # Load already-synced message-ids from DB
    rows = await db.execute_fetchall(
        "SELECT id, ext_message_id, COALESCE(mail_account, '') as mail_account, direction, is_read, customer_id "
        "FROM crm_comms_log WHERE type='email' AND ext_message_id IS NOT NULL"
    )
    known_map = {
        (r[1], r[2] or ""): {
            "id": r[0],
            "direction": r[3],
            "is_read": int(r[4] or 0),
            "customer_id": r[5],
        }
        for r in rows
    }

    new_count = 0
    now = datetime.now(timezone.utc).isoformat()
    server_ids_by_account: dict[str, set[str]] = {}
    # Global inbound IDs from server snapshot, used to avoid account-classification delete oscillation.
    inbound_server_ids: set[str] = set()
    accounts = get_mail_accounts()
    accounts_by_email = {a["email"].lower(): a for a in accounts}
    # Initialize tracked inbound accounts even if inbox is empty.
    for a in accounts:
        if a.get("sync_inbound"):
            server_ids_by_account[a["key"]] = set()

    for msg in messages:
        mid = msg["message_id"]
        fetch_account_key = (msg.get("mail_account") or "").strip().lower()
        from_addr = msg["from_addr"].lower()
        to_addrs = [a.lower() for a in msg["to_addrs"]]

        sender_acc = accounts_by_email.get(from_addr)
        if sender_acc:
            direction = "outbound"
            resolved_account_key = sender_acc["key"]
            customer_addrs = to_addrs
        else:
            direction = "inbound"
            target_acc = None
            for addr in to_addrs:
                if addr in accounts_by_email:
                    target_acc = accounts_by_email[addr]
                    break
            resolved_account_key = (target_acc["key"] if target_acc else fetch_account_key)
            customer_addrs = [from_addr]
            if target_acc and not target_acc.get("sync_inbound"):
                # Ignore inbound for non-synced aliases (e.g. info/news).
                continue

        if direction == "inbound" and mid and resolved_account_key in server_ids_by_account:
            server_ids_by_account[resolved_account_key].add(mid)
            inbound_server_ids.add(mid)
        # Find matching customer (may be None - we still store the email)
        customer_id = None
        for addr in customer_addrs:
            if addr in addr_to_customer:
                customer_id = addr_to_customer[addr]
                break

        if mid and (mid, resolved_account_key) in known_map:
            existing = known_map[(mid, resolved_account_key)]
            # Backfill customer linkage for rows created without customer_id.
            if customer_id and not existing.get("customer_id"):
                await db.execute(
                    "UPDATE crm_comms_log SET customer_id=? WHERE id=?",
                    (customer_id, existing["id"]),
                )
            # Existing inbound message: sync read/unread state from server.
            if direction == "inbound":
                server_read = 1 if msg.get("is_read") else 0
                await db.execute(
                    "UPDATE crm_comms_log SET is_read=? "
                    "WHERE type='email' AND direction='inbound' AND ext_message_id=? AND mail_account=?",
                    (server_read, mid, resolved_account_key),
                )
            continue  # already stored

        attachments_json = json.dumps(msg.get("attachments") or [])
        to_addrs_json = json.dumps(to_addrs)

        entry_id = str(uuid.uuid4())
        await db.execute(
            """INSERT INTO crm_comms_log
               (id, customer_id, type, mail_account, direction, subject, body, body_html, attachments,
                ext_message_id, from_addr, to_addrs, logged_by, occurred_at, created_at, is_read)
               VALUES (?, ?, 'email', ?, ?, ?, ?, ?, ?, ?, ?, ?, 'system', ?, ?, ?)""",
            (entry_id, customer_id, resolved_account_key, direction, msg["subject"], msg["body"],
             msg.get("body_html", ""), attachments_json,
             mid, from_addr, to_addrs_json, msg["occurred_at"], now, 1 if msg.get("is_read") else 0),
        )
        new_count += 1

    # Mirror remote deletes based on global inbound message-id snapshot.
    # To avoid transient IMAP inconsistency causing add/remove oscillation,
    # require two consecutive "missing" syncs before local deletion.
    sync_keys = [a["key"] for a in accounts if a.get("sync_inbound")]
    if sync_keys and fetch_complete:
        placeholders = ",".join("?" for _ in sync_keys)
        local_rows = await db.execute_fetchall(
            f"SELECT id, ext_message_id, mail_account FROM crm_comms_log "
            f"WHERE type='email' AND direction='inbound' AND mail_account IN ({placeholders}) "
            "AND ext_message_id IS NOT NULL",
            sync_keys,
        )
        to_delete: list[str] = []
        for row in local_rows:
            row_id, ext_id, acc_key = row[0], row[1], row[2]
            if not ext_id:
                continue
            state_key = f"missing_email::{acc_key}::{ext_id}"
            if ext_id in inbound_server_ids:
                await db.execute("DELETE FROM crm_sync_state WHERE key = ?", (state_key,))
                continue
            prev = await db.execute_fetchall("SELECT value FROM crm_sync_state WHERE key = ?", (state_key,))
            prev_count = int(prev[0][0]) if prev and (prev[0][0] or "").isdigit() else 0
            # BUGFIX: use a dedicated counter here. This previously reassigned
            # `new_count`, clobbering the new-email total used for the commit
            # condition, the log line and the return value below.
            miss_count = prev_count + 1
            await db.execute(
                "INSERT INTO crm_sync_state (key, value) VALUES (?, ?) "
                "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
                (state_key, str(miss_count)),
            )
            if miss_count >= 2:
                to_delete.append(row_id)
                await db.execute("DELETE FROM crm_sync_state WHERE key = ?", (state_key,))
        if to_delete:
            del_ph = ",".join("?" for _ in to_delete)
            await db.execute(f"DELETE FROM crm_comms_log WHERE id IN ({del_ph})", to_delete)

    if new_count or server_ids_by_account:
        await db.commit()

    # Update last sync time
    await db.execute(
        "INSERT INTO crm_sync_state (key, value) VALUES ('last_email_sync', ?) "
        "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
        (now,),
    )
    await db.commit()

    logger.info(f"[EMAIL SYNC] Done — {new_count} new emails stored")
    return new_count
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Lightweight new-mail check (synchronous — called via run_in_executor)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _check_server_count_sync() -> int:
    """Count messages across all synced inboxes.

    Kept for backward compatibility; no longer used by check_new_emails().
    Each physical mailbox source is counted once even when several aliases
    point at it. Exceptions from the IMAP server still propagate, but the
    connection is now always released.
    """
    total = 0
    seen_sources: set[tuple] = set()
    for acc in get_mail_accounts():
        if not acc.get("sync_inbound"):
            continue
        source = (
            (acc.get("imap_host") or "").lower(),
            int(acc.get("imap_port") or 0),
            (acc.get("imap_username") or "").lower(),
            (acc.get("imap_inbox") or "INBOX").upper(),
        )
        if source in seen_sources:
            continue
        seen_sources.add(source)
        if acc.get("imap_use_ssl"):
            imap = imaplib.IMAP4_SSL(acc["imap_host"], int(acc["imap_port"]))
        else:
            imap = imaplib.IMAP4(acc["imap_host"], int(acc["imap_port"]))
        try:
            imap.login(acc["imap_username"], acc["imap_password"])
            imap.select(acc.get("imap_inbox", "INBOX"), readonly=True)
            _, data = imap.search(None, "ALL")
            total += len(data[0].split()) if data[0] else 0
        finally:
            # FIX: previously the connection leaked if login/select/search
            # raised; always log out, swallowing secondary logout errors.
            try:
                imap.logout()
            except Exception:
                pass
    return total
|
||||||
|
|
||||||
|
|
||||||
|
async def check_new_emails() -> dict:
    """
    Check for inbound messages that are not yet stored locally.

    Fetches the current server snapshot, resolves each message to a mail
    account (same classification as sync_emails) and diffs the
    (message_id, account) pairs against crm_comms_log.
    Returns {"new_count": int} — does NOT download or store anything.

    NOTE: the previous docstring claimed a message-*count* comparison; the
    implementation has always compared message-ids.
    """
    if not get_mail_accounts():
        return {"new_count": 0}

    loop = asyncio.get_event_loop()
    try:
        # Reuse same account-resolution logic as sync to avoid false positives.
        messages, _ = await loop.run_in_executor(None, _sync_emails_sync)
    except Exception as e:
        logger.warning(f"[EMAIL CHECK] IMAP check failed: {e}")
        raise

    accounts = get_mail_accounts()
    accounts_by_email = {a["email"].lower(): a for a in accounts}
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT ext_message_id, COALESCE(mail_account, '') as mail_account FROM crm_comms_log "
        "WHERE type='email' AND ext_message_id IS NOT NULL"
    )
    known_ids = {(r[0], r[1] or "") for r in rows}

    new_count = 0
    for msg in messages:
        mid = (msg.get("message_id") or "").strip()
        if not mid:
            continue
        fetch_account_key = (msg.get("mail_account") or "").strip().lower()
        from_addr = (msg.get("from_addr") or "").lower()
        to_addrs = [(a or "").lower() for a in (msg.get("to_addrs") or [])]

        sender_acc = accounts_by_email.get(from_addr)
        if sender_acc:
            # Outbound copy in mailbox; not part of "new inbound mail" banner.
            continue

        target_acc = None
        for addr in to_addrs:
            if addr in accounts_by_email:
                target_acc = accounts_by_email[addr]
                break
        resolved_account_key = (target_acc["key"] if target_acc else fetch_account_key)
        if target_acc and not target_acc.get("sync_inbound"):
            continue
        if (mid, resolved_account_key) not in known_ids:
            new_count += 1

    return {"new_count": new_count}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# SMTP send (synchronous — called via run_in_executor)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _append_to_sent_sync(account: dict, raw_message: bytes) -> None:
    """Best-effort append of sent MIME message to IMAP Sent folder.

    Tries the account's configured Sent mailbox first, then common
    fallback names. Never raises: all failures are logged as warnings.
    """
    if not raw_message:
        return
    try:
        if account.get("imap_use_ssl"):
            imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
        else:
            imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
        try:
            imap.login(account["imap_username"], account["imap_password"])

            preferred = str(account.get("imap_sent") or "Sent").strip() or "Sent"
            candidates = [preferred, "Sent", "INBOX.Sent", "Sent Items", "INBOX.Sent Items"]
            # De-duplicate candidates case-insensitively, preserving priority order.
            seen = set()
            ordered_candidates = []
            for name in candidates:
                key = name.lower()
                if key not in seen:
                    seen.add(key)
                    ordered_candidates.append(name)

            appended = False
            for mailbox in ordered_candidates:
                try:
                    status, _ = imap.append(mailbox, "\\Seen", None, raw_message)
                    if status == "OK":
                        appended = True
                        break
                except Exception:
                    continue

            if not appended:
                logger.warning("[EMAIL SEND] Sent copy append failed for account=%s", account.get("key"))
        finally:
            # FIX: logout was previously skipped when login/append raised,
            # leaking the IMAP connection.
            imap.logout()
    except Exception as e:
        logger.warning("[EMAIL SEND] IMAP append to Sent failed for account=%s: %s", account.get("key"), e)
|
||||||
|
|
||||||
|
|
||||||
|
def _send_email_sync(
    account: dict,
    to: str,
    subject: str,
    body: str,
    body_html: str,
    cc: List[str],
    file_attachments: Optional[List[Tuple[str, bytes, str]]] = None,
) -> str:
    """Send via SMTP. Returns the Message-ID header.
    file_attachments: list of (filename, content_bytes, mime_type)
    """
    html_with_cids, inline_images = _extract_inline_data_images(body_html or "")

    # Build body tree:
    # - with inline images: related(alternative(text/plain, text/html), image parts)
    # - without inline images: alternative(text/plain, text/html)
    if inline_images:
        body_part = MIMEMultipart("related")
        alt_part = MIMEMultipart("alternative")
        alt_part.attach(MIMEText(body, "plain", "utf-8"))
        if html_with_cids:
            alt_part.attach(MIMEText(html_with_cids, "html", "utf-8"))
        body_part.attach(alt_part)

        for idx, (cid, content, mime_type) in enumerate(inline_images, start=1):
            maintype, _, subtype = mime_type.partition("/")
            img_part = MIMEBase(maintype or "image", subtype or "png")
            img_part.set_payload(content)
            encoders.encode_base64(img_part)
            img_part.add_header("Content-ID", f"<{cid}>")
            img_part.add_header("Content-Disposition", "inline", filename=f"inline-{idx}.{subtype or 'png'}")
            body_part.attach(img_part)
    else:
        body_part = MIMEMultipart("alternative")
        body_part.attach(MIMEText(body, "plain", "utf-8"))
        if body_html:
            body_part.attach(MIMEText(body_html, "html", "utf-8"))

    # Wrap with mixed only when classic file attachments exist.
    if file_attachments:
        msg = MIMEMultipart("mixed")
        msg.attach(body_part)
    else:
        msg = body_part

    from_addr = account["email"]
    msg["From"] = from_addr
    msg["To"] = to
    msg["Subject"] = subject
    if cc:
        msg["Cc"] = ", ".join(cc)

    msg_id = f"<{uuid.uuid4()}@bellsystems>"
    msg["Message-ID"] = msg_id

    # Attach files
    for filename, content, mime_type in (file_attachments or []):
        maintype, _, subtype = mime_type.partition("/")
        part = MIMEBase(maintype or "application", subtype or "octet-stream")
        part.set_payload(content)
        encoders.encode_base64(part)
        part.add_header("Content-Disposition", "attachment", filename=filename)
        msg.attach(part)

    recipients = [to] + cc
    raw_for_append = msg.as_bytes()
    if account.get("smtp_use_tls"):
        server = smtplib.SMTP(account["smtp_host"], int(account["smtp_port"]))
        server.starttls()
    else:
        server = smtplib.SMTP_SSL(account["smtp_host"], int(account["smtp_port"]))

    try:
        server.login(account["smtp_username"], account["smtp_password"])
        server.sendmail(from_addr, recipients, msg.as_string())
    finally:
        # FIX: the SMTP session previously leaked if login/sendmail raised;
        # secondary quit() failures are swallowed so they can't mask the
        # original exception.
        try:
            server.quit()
        except Exception:
            pass
    _append_to_sent_sync(account, raw_for_append)

    return msg_id
|
||||||
|
|
||||||
|
|
||||||
|
async def send_email(
    customer_id: str | None,
    from_account: str | None,
    to: str,
    subject: str,
    body: str,
    body_html: str,
    cc: List[str],
    sent_by: str,
    file_attachments: Optional[List[Tuple[str, bytes, str]]] = None,
) -> dict:
    """Send an email and record it in crm_comms_log. Returns the new log entry.
    file_attachments: list of (filename, content_bytes, mime_type)
    """
    accounts = get_mail_accounts()
    if not accounts:
        raise RuntimeError("SMTP not configured")
    account = account_by_key(from_account) if from_account else None
    if not account:
        raise RuntimeError("Please select a valid sender account")
    if not account.get("allow_send"):
        raise RuntimeError("Selected account is not allowed to send")
    if not account.get("smtp_host") or not account.get("smtp_username") or not account.get("smtp_password"):
        raise RuntimeError("SMTP not configured for selected account")

    # If the caller did not provide a customer_id (e.g. compose from Mail page),
    # auto-link by matching recipient addresses against CRM customer emails.
    resolved_customer_id = customer_id
    if not resolved_customer_id:
        addr_to_customer = _load_customer_email_map()
        rcpts = [to, *cc]
        parsed_rcpts = [addr for _, addr in email.utils.getaddresses(rcpts) if addr]
        for addr in parsed_rcpts:
            key = (addr or "").strip().lower()
            if key in addr_to_customer:
                resolved_customer_id = addr_to_customer[key]
                break

    loop = asyncio.get_event_loop()
    import functools
    msg_id = await loop.run_in_executor(
        None,
        functools.partial(_send_email_sync, account, to, subject, body, body_html, cc, file_attachments or []),
    )

    # Upload attachments to Nextcloud and register in crm_media
    comm_attachments = []
    if file_attachments and resolved_customer_id:
        from crm import nextcloud, service
        from crm.models import MediaCreate, MediaDirection
        from shared.firebase import get_db as get_firestore
        firestore_db = get_firestore()
        doc = firestore_db.collection("crm_customers").document(resolved_customer_id).get()
        if doc.exists:
            data = doc.to_dict()
            # Build a minimal CustomerInDB-like object for get_customer_nc_path
            folder_id = data.get("folder_id") or resolved_customer_id
            nc_path = folder_id

            for filename, content, mime_type in file_attachments:
                # images/video → sent_media, everything else → documents
                if mime_type.startswith("image/") or mime_type.startswith("video/"):
                    subfolder = "sent_media"
                else:
                    subfolder = "documents"
                target_folder = f"customers/{nc_path}/{subfolder}"
                # BUGFIX: include the actual filename in the remote path.
                # Previously a literal placeholder was used, so every
                # attachment was uploaded to the same path and overwrote
                # the previous one.
                file_path = f"{target_folder}/{filename}"
                try:
                    await nextcloud.ensure_folder(target_folder)
                    await nextcloud.upload_file(file_path, content, mime_type)
                    await service.create_media(MediaCreate(
                        customer_id=resolved_customer_id,
                        filename=filename,
                        nextcloud_path=file_path,
                        mime_type=mime_type,
                        direction=MediaDirection.sent,
                        tags=["email-attachment"],
                        uploaded_by=sent_by,
                    ))
                    comm_attachments.append({"filename": filename, "nextcloud_path": file_path})
                except Exception as e:
                    logger.warning(f"[EMAIL SEND] Failed to upload attachment {filename}: {e}")

    now = datetime.now(timezone.utc).isoformat()
    entry_id = str(uuid.uuid4())
    db = await mqtt_db.get_db()
    our_addr = account["email"].lower()
    to_addrs_json = json.dumps([to] + cc)
    attachments_json = json.dumps(comm_attachments)
    await db.execute(
        """INSERT INTO crm_comms_log
           (id, customer_id, type, mail_account, direction, subject, body, body_html, attachments,
            ext_message_id, from_addr, to_addrs, logged_by, occurred_at, created_at)
           VALUES (?, ?, 'email', ?, 'outbound', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (entry_id, resolved_customer_id, account["key"], subject, body, body_html, attachments_json, msg_id,
         our_addr, to_addrs_json, sent_by, now, now),
    )
    await db.commit()

    return {
        "id": entry_id,
        "customer_id": resolved_customer_id,
        "type": "email",
        "mail_account": account["key"],
        "direction": "outbound",
        "subject": subject,
        "body": body,
        "body_html": body_html,
        "attachments": comm_attachments,
        "ext_message_id": msg_id,
        "from_addr": our_addr,
        "to_addrs": [to] + cc,
        "logged_by": sent_by,
        "occurred_at": now,
        "created_at": now,
    }
|
||||||
|
|
||||||
|
|
||||||
|
def _delete_remote_email_sync(account: dict, ext_message_id: str) -> bool:
    """Delete all inbox messages with the given Message-ID from the IMAP server.

    Returns True when at least one matching message was flagged and expunged,
    False when the id is empty or no match was found. IMAP errors propagate
    to the caller, but the connection is now always released.
    """
    if not ext_message_id:
        return False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
    try:
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        uids = data[0].split() if data and data[0] else []
        if not uids:
            return False
        for uid in uids:
            imap.store(uid, "+FLAGS", "\\Deleted")
        imap.expunge()
        return True
    finally:
        # FIX: previously logout was skipped when login/search/store raised,
        # leaking the connection.
        imap.logout()
|
||||||
|
|
||||||
|
|
||||||
|
async def delete_remote_email(ext_message_id: str, mail_account: str | None, from_addr: str | None = None) -> bool:
    """Best-effort deletion of a message from the remote IMAP inbox.

    Resolves the account by key first, then by sender address; returns
    False when no usable IMAP account is found or the delete fails.
    """
    account = account_by_key(mail_account) if mail_account else None
    if account is None:
        account = account_by_email(from_addr)
    if account is None or not account.get("imap_host"):
        return False
    loop = asyncio.get_event_loop()
    try:
        return await loop.run_in_executor(None, lambda: _delete_remote_email_sync(account, ext_message_id))
    except Exception as e:
        logger.warning(f"[EMAIL DELETE] Failed remote delete for {ext_message_id}: {e}")
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def _set_remote_read_sync(account: dict, ext_message_id: str, read: bool) -> bool:
    """Mirror a local read/unread toggle to the remote IMAP inbox.

    Adds or removes the \\Seen flag on every message matching the
    Message-ID. Returns True when at least one message was updated.
    IMAP errors propagate, but the connection is now always released.
    """
    if not ext_message_id:
        return False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
    try:
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        uids = data[0].split() if data and data[0] else []
        if not uids:
            return False
        flag_op = "+FLAGS" if read else "-FLAGS"
        for uid in uids:
            imap.store(uid, flag_op, "\\Seen")
        return True
    finally:
        # FIX: previously logout was skipped when login/search/store raised,
        # leaking the connection.
        imap.logout()
|
||||||
|
|
||||||
|
|
||||||
|
async def set_remote_read(ext_message_id: str, mail_account: str | None, from_addr: str | None, read: bool) -> bool:
    """Best-effort propagation of a read/unread change to the IMAP server.

    Resolves the account by key first, then by sender address; returns
    False when no usable IMAP account is found or the update fails.
    """
    account = account_by_key(mail_account) if mail_account else None
    if account is None:
        account = account_by_email(from_addr)
    if account is None or not account.get("imap_host"):
        return False
    loop = asyncio.get_event_loop()
    try:
        return await loop.run_in_executor(None, lambda: _set_remote_read_sync(account, ext_message_id, read))
    except Exception as e:
        logger.warning(f"[EMAIL READ] Failed remote read update for {ext_message_id}: {e}")
        return False
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
104
backend/crm/mail_accounts.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
|
||||||
|
def _bool(v: Any, default: bool) -> bool:
|
||||||
|
if isinstance(v, bool):
|
||||||
|
return v
|
||||||
|
if isinstance(v, str):
|
||||||
|
return v.strip().lower() in {"1", "true", "yes", "on"}
|
||||||
|
if v is None:
|
||||||
|
return default
|
||||||
|
return bool(v)
|
||||||
|
|
||||||
|
|
||||||
|
def get_mail_accounts() -> list[dict]:
    """
    Returns normalized account dictionaries.
    Falls back to legacy single-account config if MAIL_ACCOUNTS_JSON is empty.
    """
    accounts: list[dict] = []
    for entry in settings.mail_accounts:
        if not isinstance(entry, dict):
            continue
        key = str(entry.get("key") or "").strip().lower()
        addr = str(entry.get("email") or "").strip().lower()
        # Both a key and an address are mandatory; skip malformed entries.
        if not key or not addr:
            continue
        accounts.append(
            {
                "key": key,
                "label": str(entry.get("label") or key.title()),
                "email": addr,
                "imap_host": entry.get("imap_host") or settings.imap_host,
                "imap_port": int(entry.get("imap_port") or settings.imap_port or 993),
                "imap_username": entry.get("imap_username") or addr,
                "imap_password": entry.get("imap_password") or settings.imap_password,
                "imap_use_ssl": _bool(entry.get("imap_use_ssl"), settings.imap_use_ssl),
                "imap_inbox": str(entry.get("imap_inbox") or "INBOX"),
                "imap_sent": str(entry.get("imap_sent") or "Sent"),
                "smtp_host": entry.get("smtp_host") or settings.smtp_host,
                "smtp_port": int(entry.get("smtp_port") or settings.smtp_port or 587),
                "smtp_username": entry.get("smtp_username") or addr,
                "smtp_password": entry.get("smtp_password") or settings.smtp_password,
                "smtp_use_tls": _bool(entry.get("smtp_use_tls"), settings.smtp_use_tls),
                "sync_inbound": _bool(entry.get("sync_inbound"), True),
                "allow_send": _bool(entry.get("allow_send"), True),
            }
        )
    if accounts:
        return accounts

    # Legacy single-account fallback
    if not (settings.imap_host or settings.smtp_host):
        return []
    legacy_email = (settings.smtp_username or settings.imap_username or "").strip().lower()
    if not legacy_email:
        return []
    return [
        {
            "key": "default",
            "label": "Default",
            "email": legacy_email,
            "imap_host": settings.imap_host,
            "imap_port": settings.imap_port,
            "imap_username": settings.imap_username,
            "imap_password": settings.imap_password,
            "imap_use_ssl": settings.imap_use_ssl,
            "imap_inbox": "INBOX",
            "imap_sent": "Sent",
            "smtp_host": settings.smtp_host,
            "smtp_port": settings.smtp_port,
            "smtp_username": settings.smtp_username,
            "smtp_password": settings.smtp_password,
            "smtp_use_tls": settings.smtp_use_tls,
            "sync_inbound": True,
            "allow_send": True,
        }
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def account_by_key(key: str | None) -> dict | None:
    """Look up a configured mail account by its normalized key, or None."""
    wanted = (key or "").strip().lower()
    if not wanted:
        return None
    return next((acc for acc in get_mail_accounts() if acc["key"] == wanted), None)
|
||||||
|
|
||||||
|
|
||||||
|
def account_by_email(email_addr: str | None) -> dict | None:
    """Look up a configured mail account by its e-mail address, or None."""
    wanted = (email_addr or "").strip().lower()
    if not wanted:
        return None
    return next((acc for acc in get_mail_accounts() if acc["email"] == wanted), None)
|
||||||
35
backend/crm/media_router.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
from fastapi import APIRouter, Depends, Query
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from crm.models import MediaCreate, MediaInDB, MediaListResponse
|
||||||
|
from crm import service
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/crm/media", tags=["crm-media"])


@router.get("", response_model=MediaListResponse)
async def list_media(
    customer_id: Optional[str] = Query(None),
    order_id: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List media records, optionally filtered by customer and/or order."""
    found = await service.list_media(customer_id=customer_id, order_id=order_id)
    return MediaListResponse(items=found, total=len(found))


@router.post("", response_model=MediaInDB, status_code=201)
async def create_media(
    body: MediaCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a new media record (requires crm edit permission)."""
    return await service.create_media(body)


@router.delete("/{media_id}", status_code=204)
async def delete_media(
    media_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a media record by id (requires crm edit permission)."""
    await service.delete_media(media_id)
|
||||||
443
backend/crm/models.py
Normal file
@@ -0,0 +1,443 @@
|
|||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class ProductCategory(str, Enum):
    """Catalog categories for CRM products; str-valued for JSON/DB round-trips."""
    controller = "controller"
    striker = "striker"
    clock = "clock"
    part = "part"
    repair_service = "repair_service"
|
||||||
|
|
||||||
|
|
||||||
|
class CostLineItem(BaseModel):
    """One named cost component of a product (e.g. a part or material)."""
    name: str
    quantity: float = 1
    price: float = 0.0  # unit price; line total is presumably quantity * price — confirm with callers
|
||||||
|
|
||||||
|
|
||||||
|
class ProductCosts(BaseModel):
|
||||||
|
labor_hours: Optional[float] = None
|
||||||
|
labor_rate: Optional[float] = None
|
||||||
|
items: List[CostLineItem] = []
|
||||||
|
total: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ProductStock(BaseModel):
|
||||||
|
on_hand: int = 0
|
||||||
|
reserved: int = 0
|
||||||
|
available: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
class ProductCreate(BaseModel):
|
||||||
|
name: str
|
||||||
|
sku: Optional[str] = None
|
||||||
|
category: ProductCategory
|
||||||
|
description: Optional[str] = None
|
||||||
|
name_en: Optional[str] = None
|
||||||
|
name_gr: Optional[str] = None
|
||||||
|
description_en: Optional[str] = None
|
||||||
|
description_gr: Optional[str] = None
|
||||||
|
price: float
|
||||||
|
currency: str = "EUR"
|
||||||
|
costs: Optional[ProductCosts] = None
|
||||||
|
stock: Optional[ProductStock] = None
|
||||||
|
active: bool = True
|
||||||
|
status: str = "active" # active | discontinued | planned
|
||||||
|
photo_url: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ProductUpdate(BaseModel):
|
||||||
|
name: Optional[str] = None
|
||||||
|
sku: Optional[str] = None
|
||||||
|
category: Optional[ProductCategory] = None
|
||||||
|
description: Optional[str] = None
|
||||||
|
name_en: Optional[str] = None
|
||||||
|
name_gr: Optional[str] = None
|
||||||
|
description_en: Optional[str] = None
|
||||||
|
description_gr: Optional[str] = None
|
||||||
|
price: Optional[float] = None
|
||||||
|
currency: Optional[str] = None
|
||||||
|
costs: Optional[ProductCosts] = None
|
||||||
|
stock: Optional[ProductStock] = None
|
||||||
|
active: Optional[bool] = None
|
||||||
|
status: Optional[str] = None
|
||||||
|
photo_url: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ProductInDB(ProductCreate):
|
||||||
|
id: str
|
||||||
|
created_at: str
|
||||||
|
updated_at: str
|
||||||
|
|
||||||
|
|
||||||
|
class ProductListResponse(BaseModel):
|
||||||
|
products: List[ProductInDB]
|
||||||
|
total: int
|
||||||
|
|
||||||
|
|
||||||
|
# ── Customers ────────────────────────────────────────────────────────────────


class ContactType(str, Enum):
    """Channel kinds a customer can be reached through."""
    email = "email"
    phone = "phone"
    whatsapp = "whatsapp"
    other = "other"


class CustomerContact(BaseModel):
    """One contact entry; `primary` marks the preferred channel."""
    type: ContactType
    label: str
    value: str
    primary: bool = False


class CustomerNote(BaseModel):
    """Free-text note with author (`by`) and timestamp (`at`)."""
    text: str
    by: str
    at: str


class OwnedItemType(str, Enum):
    """Discriminator for OwnedItem: linked device, catalogue product, or free text."""
    console_device = "console_device"
    product = "product"
    freetext = "freetext"


class OwnedItem(BaseModel):
    """
    Something a customer owns. Which optional fields apply depends on `type`;
    the unused groups stay None.
    """
    type: OwnedItemType
    # console_device fields
    device_id: Optional[str] = None
    label: Optional[str] = None
    # product fields
    product_id: Optional[str] = None
    product_name: Optional[str] = None
    quantity: Optional[int] = None
    serial_numbers: Optional[List[str]] = None
    # freetext fields
    description: Optional[str] = None
    serial_number: Optional[str] = None
    notes: Optional[str] = None


class CustomerLocation(BaseModel):
    """Postal address parts; all optional."""
    address: Optional[str] = None
    city: Optional[str] = None
    postal_code: Optional[str] = None
    region: Optional[str] = None
    country: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
# ── New customer status models ────────────────────────────────────────────────


class TechnicalIssue(BaseModel):
    """An open/resolved technical issue tracked on a customer."""
    active: bool = True
    opened_date: str  # ISO string
    resolved_date: Optional[str] = None
    note: str
    opened_by: str
    resolved_by: Optional[str] = None


class InstallSupportEntry(BaseModel):
    """An install/support engagement; same shape as TechnicalIssue but kept distinct."""
    active: bool = True
    opened_date: str  # ISO string
    resolved_date: Optional[str] = None
    note: str
    opened_by: str
    resolved_by: Optional[str] = None


class TransactionEntry(BaseModel):
    """A single money movement (invoice/payment/refund/credit) on a customer."""
    date: str  # ISO string
    flow: str  # "invoice" | "payment" | "refund" | "credit"
    payment_type: Optional[str] = None  # "cash" | "bank_transfer" | "card" | "paypal" — null for invoices
    category: str  # "full_payment" | "advance" | "installment"
    amount: float
    currency: str = "EUR"
    invoice_ref: Optional[str] = None
    order_ref: Optional[str] = None
    recorded_by: str
    note: str = ""


# Lightweight summary stored on customer doc for fast CustomerList expanded view
class CrmSummary(BaseModel):
    """Denormalized counters/dates so list views avoid extra queries."""
    active_order_status: Optional[str] = None
    active_order_status_date: Optional[str] = None
    active_order_title: Optional[str] = None
    active_issues_count: int = 0
    latest_issue_date: Optional[str] = None
    active_support_count: int = 0
    latest_support_date: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class CustomerCreate(BaseModel):
    """
    Payload for creating a customer. Only `name` is required; status-history
    lists (issues/support/transactions) are stored as loose dicts.
    """
    title: Optional[str] = None
    name: str
    surname: Optional[str] = None
    organization: Optional[str] = None
    religion: Optional[str] = None
    contacts: List[CustomerContact] = []
    notes: List[CustomerNote] = []
    location: Optional[CustomerLocation] = None
    language: str = "el"
    tags: List[str] = []
    owned_items: List[OwnedItem] = []
    linked_user_ids: List[str] = []
    nextcloud_folder: Optional[str] = None
    folder_id: Optional[str] = None
    relationship_status: str = "lead"
    technical_issues: List[Dict[str, Any]] = []
    install_support: List[Dict[str, Any]] = []
    transaction_history: List[Dict[str, Any]] = []
    crm_summary: Optional[Dict[str, Any]] = None


class CustomerUpdate(BaseModel):
    """Partial customer update; status-history lists are not patchable here."""
    title: Optional[str] = None
    name: Optional[str] = None
    surname: Optional[str] = None
    organization: Optional[str] = None
    religion: Optional[str] = None
    contacts: Optional[List[CustomerContact]] = None
    notes: Optional[List[CustomerNote]] = None
    location: Optional[CustomerLocation] = None
    language: Optional[str] = None
    tags: Optional[List[str]] = None
    owned_items: Optional[List[OwnedItem]] = None
    linked_user_ids: Optional[List[str]] = None
    nextcloud_folder: Optional[str] = None
    relationship_status: Optional[str] = None
    # folder_id intentionally excluded from update — set once at creation


class CustomerInDB(CustomerCreate):
    """Stored customer: creation fields plus id and timestamps."""
    id: str
    created_at: str
    updated_at: str


class CustomerListResponse(BaseModel):
    """Envelope returned by customer list endpoints."""
    customers: List[CustomerInDB]
    total: int
|
||||||
|
|
||||||
|
|
||||||
|
# ── Orders ───────────────────────────────────────────────────────────────────


class OrderStatus(str, Enum):
    """Lifecycle states of an order, from first contact to completion."""
    negotiating = "negotiating"
    awaiting_quotation = "awaiting_quotation"
    awaiting_customer_confirmation = "awaiting_customer_confirmation"
    awaiting_fulfilment = "awaiting_fulfilment"
    awaiting_payment = "awaiting_payment"
    manufacturing = "manufacturing"
    shipped = "shipped"
    installed = "installed"
    declined = "declined"
    complete = "complete"


class OrderPaymentStatus(BaseModel):
    """Running payment totals for an order."""
    required_amount: float = 0
    received_amount: float = 0
    balance_due: float = 0
    advance_required: bool = False
    advance_amount: Optional[float] = None
    payment_complete: bool = False


class OrderTimelineEvent(BaseModel):
    """A dated milestone/note appended to an order's timeline."""
    date: str  # ISO string
    type: str  # "quote_request" | "quote_sent" | "quote_accepted" | "quote_declined"
    # | "mfg_started" | "mfg_complete" | "order_shipped" | "installed"
    # | "payment_received" | "invoice_sent" | "note"
    note: str = ""
    updated_by: str


class OrderDiscount(BaseModel):
    """Discount applied to an order; `value` is a percent or a fixed amount per `type`."""
    type: str  # "percentage" | "fixed"
    value: float = 0
    reason: Optional[str] = None


class OrderShipping(BaseModel):
    """Shipping details; all optional until the order actually ships."""
    method: Optional[str] = None
    tracking_number: Optional[str] = None
    carrier: Optional[str] = None
    shipped_at: Optional[str] = None
    delivered_at: Optional[str] = None
    destination: Optional[str] = None


class OrderItem(BaseModel):
    """A line item; which optional fields apply depends on `type`."""
    type: str  # console_device | product | freetext
    product_id: Optional[str] = None
    product_name: Optional[str] = None
    description: Optional[str] = None
    quantity: int = 1
    unit_price: float = 0.0
    serial_numbers: List[str] = []


class OrderCreate(BaseModel):
    """Payload for creating an order; only `customer_id` is required."""
    customer_id: str
    order_number: Optional[str] = None
    title: Optional[str] = None
    created_by: Optional[str] = None
    status: OrderStatus = OrderStatus.negotiating
    status_updated_date: Optional[str] = None
    status_updated_by: Optional[str] = None
    items: List[OrderItem] = []
    subtotal: float = 0
    discount: Optional[OrderDiscount] = None
    total_price: float = 0
    currency: str = "EUR"
    shipping: Optional[OrderShipping] = None
    payment_status: Optional[Dict[str, Any]] = None
    invoice_path: Optional[str] = None
    notes: Optional[str] = None
    timeline: List[Dict[str, Any]] = []


class OrderUpdate(BaseModel):
    """Partial order update; customer_id is not part of the update payload."""
    order_number: Optional[str] = None
    title: Optional[str] = None
    status: Optional[OrderStatus] = None
    status_updated_date: Optional[str] = None
    status_updated_by: Optional[str] = None
    items: Optional[List[OrderItem]] = None
    subtotal: Optional[float] = None
    discount: Optional[OrderDiscount] = None
    total_price: Optional[float] = None
    currency: Optional[str] = None
    shipping: Optional[OrderShipping] = None
    payment_status: Optional[Dict[str, Any]] = None
    invoice_path: Optional[str] = None
    notes: Optional[str] = None


class OrderInDB(OrderCreate):
    """Stored order: creation fields plus id and timestamps."""
    id: str
    created_at: str
    updated_at: str


class OrderListResponse(BaseModel):
    """Envelope returned by order list endpoints."""
    orders: List[OrderInDB]
    total: int
|
||||||
|
|
||||||
|
|
||||||
|
# ── Comms Log ─────────────────────────────────────────────────────────────────


class CommType(str, Enum):
    """Kinds of logged communication."""
    email = "email"
    whatsapp = "whatsapp"
    call = "call"
    sms = "sms"
    note = "note"
    in_person = "in_person"


class CommDirection(str, Enum):
    """Direction of a communication relative to the business."""
    inbound = "inbound"
    outbound = "outbound"
    internal = "internal"


class CommAttachment(BaseModel):
    """A file attached to a communication, stored in Nextcloud when path is set."""
    filename: str
    nextcloud_path: Optional[str] = None
    content_type: Optional[str] = None
    size: Optional[int] = None


class CommCreate(BaseModel):
    """Payload for logging a communication; most fields are channel-dependent."""
    customer_id: Optional[str] = None
    type: CommType
    mail_account: Optional[str] = None
    direction: CommDirection
    subject: Optional[str] = None
    body: Optional[str] = None
    body_html: Optional[str] = None
    attachments: List[CommAttachment] = []
    ext_message_id: Optional[str] = None
    from_addr: Optional[str] = None
    to_addrs: Optional[List[str]] = None
    logged_by: Optional[str] = None
    occurred_at: Optional[str] = None  # defaults to now if not provided


class CommUpdate(BaseModel):
    """Partial update for a logged communication (metadata only)."""
    type: Optional[CommType] = None
    direction: Optional[CommDirection] = None
    subject: Optional[str] = None
    body: Optional[str] = None
    logged_by: Optional[str] = None
    occurred_at: Optional[str] = None


class CommInDB(BaseModel):
    """
    Stored communication. Adds server fields (id, created_at) and the
    read/important flags that exist only on stored documents.
    """
    id: str
    customer_id: Optional[str] = None
    type: CommType
    mail_account: Optional[str] = None
    direction: CommDirection
    subject: Optional[str] = None
    body: Optional[str] = None
    body_html: Optional[str] = None
    attachments: List[CommAttachment] = []
    ext_message_id: Optional[str] = None
    from_addr: Optional[str] = None
    to_addrs: Optional[List[str]] = None
    logged_by: Optional[str] = None
    occurred_at: str
    created_at: str
    is_important: bool = False
    is_read: bool = False


class CommListResponse(BaseModel):
    """Envelope returned by comms list endpoints."""
    entries: List[CommInDB]
    total: int
|
||||||
|
|
||||||
|
|
||||||
|
# ── Media ─────────────────────────────────────────────────────────────────────


class MediaDirection(str, Enum):
    """Whether a file was received from, sent to, or kept internal to a customer."""
    received = "received"
    sent = "sent"
    internal = "internal"


class MediaCreate(BaseModel):
    """Payload for registering a file that already lives in Nextcloud."""
    customer_id: Optional[str] = None
    order_id: Optional[str] = None
    filename: str
    nextcloud_path: str
    mime_type: Optional[str] = None
    direction: Optional[MediaDirection] = None
    tags: List[str] = []
    uploaded_by: Optional[str] = None
    thumbnail_path: Optional[str] = None


class MediaInDB(BaseModel):
    """Stored media record; mirrors MediaCreate plus id and created_at."""
    id: str
    customer_id: Optional[str] = None
    order_id: Optional[str] = None
    filename: str
    nextcloud_path: str
    mime_type: Optional[str] = None
    direction: Optional[MediaDirection] = None
    tags: List[str] = []
    uploaded_by: Optional[str] = None
    created_at: str
    thumbnail_path: Optional[str] = None


class MediaListResponse(BaseModel):
    """Envelope returned by media list endpoints."""
    items: List[MediaInDB]
    total: int
|
||||||
329
backend/crm/nextcloud.py
Normal file
@@ -0,0 +1,329 @@
|
|||||||
|
"""
|
||||||
|
Nextcloud WebDAV client.
|
||||||
|
|
||||||
|
All paths passed to these functions are relative to `settings.nextcloud_base_path`.
|
||||||
|
The full WebDAV URL is:
|
||||||
|
{nextcloud_url}/remote.php/dav/files/{username}/{base_path}/{relative_path}
|
||||||
|
"""
|
||||||
|
import xml.etree.ElementTree as ET
|
||||||
|
from typing import List
|
||||||
|
from urllib.parse import unquote
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
# XML namespace of all WebDAV property elements parsed below.
DAV_NS = "DAV:"

# Default timeout for all Nextcloud WebDAV requests (seconds)
_TIMEOUT = 60.0

# Shared async client — reuses TCP connections across requests so Nextcloud
# doesn't see rapid connection bursts that trigger brute-force throttling.
# Created lazily by _get_client(); closed via close_client() on shutdown.
_http_client: httpx.AsyncClient | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def _get_client() -> httpx.AsyncClient:
    """Return the module-wide AsyncClient, creating it on first use or after close."""
    global _http_client
    needs_new = _http_client is None or _http_client.is_closed
    if needs_new:
        _http_client = httpx.AsyncClient(
            timeout=_TIMEOUT,
            follow_redirects=True,
            headers={"User-Agent": "BellSystems-CP/1.0"},
        )
    return _http_client
|
||||||
|
|
||||||
|
|
||||||
|
async def close_client() -> None:
    """Close the shared HTTP client. Call this on application shutdown."""
    global _http_client
    if _http_client is not None and not _http_client.is_closed:
        await _http_client.aclose()
        _http_client = None
|
||||||
|
|
||||||
|
|
||||||
|
async def keepalive_ping() -> None:
    """
    Fire a lightweight Depth:0 PROPFIND at the Nextcloud base folder so the
    pooled TCP connection stays warm. A no-op when Nextcloud is unconfigured;
    failures are logged and never raised.
    """
    if not settings.nextcloud_url:
        return
    try:
        client = _get_client()
        await client.request(
            "PROPFIND",
            _base_url(),
            auth=_auth(),
            headers={"Depth": "0", "Content-Type": "application/xml"},
            content=_PROPFIND_BODY,
        )
    except Exception as exc:
        print(f"[NEXTCLOUD KEEPALIVE] ping failed: {exc}")
|
||||||
|
|
||||||
|
|
||||||
|
def _dav_user() -> str:
    """The username used in the WebDAV URL path (may differ from the login username)."""
    if settings.nextcloud_dav_user:
        return settings.nextcloud_dav_user
    return settings.nextcloud_username
|
||||||
|
|
||||||
|
|
||||||
|
def _base_url() -> str:
    """
    Root WebDAV URL for the configured base folder.

    Raises HTTPException 503 when Nextcloud is not configured.
    """
    if not settings.nextcloud_url:
        raise HTTPException(status_code=503, detail="Nextcloud not configured")
    host = settings.nextcloud_url.rstrip("/")
    return f"{host}/remote.php/dav/files/{_dav_user()}/{settings.nextcloud_base_path}"
|
||||||
|
|
||||||
|
|
||||||
|
def _auth() -> tuple[str, str]:
    # Basic-auth (username, password) tuple consumed by httpx's `auth=` parameter.
    return (settings.nextcloud_username, settings.nextcloud_password)
|
||||||
|
|
||||||
|
|
||||||
|
def _full_url(relative_path: str) -> str:
    """Build the absolute WebDAV URL for a path relative to the base folder."""
    trimmed = relative_path.strip("/")
    root = _base_url()
    if not trimmed:
        return root
    return f"{root}/{trimmed}"
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_propfind(xml_bytes: bytes, base_path_prefix: str) -> List[dict]:
    """
    Parse a PROPFIND XML response.
    Returns list of file/folder entries, skipping the root itself.

    Each entry dict has: filename, path (relative to base_path), mime_type,
    size (bytes, 0 when absent), last_modified (server string or None), is_dir.
    """
    root = ET.fromstring(xml_bytes)
    results = []

    # The prefix we need to strip from D:href to get the relative path back
    # href looks like: /remote.php/dav/files/user/BellSystems/Console/customers/abc/
    dav_prefix = (
        f"/remote.php/dav/files/{_dav_user()}"
        f"/{settings.nextcloud_base_path}/"
    )

    for response in root.findall(f"{{{DAV_NS}}}response"):
        href_el = response.find(f"{{{DAV_NS}}}href")
        if href_el is None:
            continue
        # hrefs are percent-encoded on the wire; decode before comparing.
        href = unquote(href_el.text or "")

        # Strip DAV prefix to get relative path within base_path
        if href.startswith(dav_prefix):
            rel = href[len(dav_prefix):].rstrip("/")
        else:
            rel = href

        # Skip the folder itself (the root of the PROPFIND request)
        if rel == base_path_prefix.strip("/"):
            continue

        propstat = response.find(f"{{{DAV_NS}}}propstat")
        if propstat is None:
            continue
        prop = propstat.find(f"{{{DAV_NS}}}prop")
        if prop is None:
            continue

        # is_dir: resourcetype contains D:collection
        resource_type = prop.find(f"{{{DAV_NS}}}resourcetype")
        is_dir = resource_type is not None and resource_type.find(f"{{{DAV_NS}}}collection") is not None

        # Fall back to a sensible mime type when the server sends none.
        content_type_el = prop.find(f"{{{DAV_NS}}}getcontenttype")
        mime_type = content_type_el.text if content_type_el is not None else (
            "inode/directory" if is_dir else "application/octet-stream"
        )

        size_el = prop.find(f"{{{DAV_NS}}}getcontentlength")
        size = int(size_el.text) if size_el is not None and size_el.text else 0

        modified_el = prop.find(f"{{{DAV_NS}}}getlastmodified")
        last_modified = modified_el.text if modified_el is not None else None

        filename = rel.split("/")[-1] if rel else ""

        results.append({
            "filename": filename,
            "path": rel,
            "mime_type": mime_type,
            "size": size,
            "last_modified": last_modified,
            "is_dir": is_dir,
        })

    return results
|
||||||
|
|
||||||
|
|
||||||
|
async def ensure_folder(relative_path: str) -> None:
    """
    Create a folder (and all parents) in Nextcloud via MKCOL.
    Includes the base_path segments so the full hierarchy is created from scratch.
    Silently succeeds if folders already exist.

    Raises HTTPException 502 on any other MKCOL failure.
    """
    rel = relative_path.strip("/")
    segments = settings.nextcloud_base_path.strip("/").split("/")
    if rel:
        segments = segments + rel.split("/")

    dav_root = f"{settings.nextcloud_url.rstrip('/')}/remote.php/dav/files/{_dav_user()}"
    client = _get_client()
    current = ""
    for segment in segments:
        current = f"{current}/{segment}" if current else segment
        resp = await client.request("MKCOL", f"{dav_root}/{current}", auth=_auth())
        # 201 = created, 405/409 = already exists — both are fine
        if resp.status_code not in (201, 405, 409):
            raise HTTPException(
                status_code=502,
                detail=f"Failed to create Nextcloud folder '{current}': {resp.status_code}",
            )
|
||||||
|
|
||||||
|
|
||||||
|
async def write_info_file(customer_folder: str, customer_name: str, customer_id: str) -> None:
    """Write a _info.txt stub into a new customer folder for human browsability."""
    stub = f"Customer: {customer_name}\nID: {customer_id}\n"
    target = f"{customer_folder}/_info.txt"
    await upload_file(target, stub.encode("utf-8"), "text/plain")
|
||||||
|
|
||||||
|
|
||||||
|
_PROPFIND_BODY = b"""<?xml version="1.0"?>
|
||||||
|
<D:propfind xmlns:D="DAV:">
|
||||||
|
<D:prop>
|
||||||
|
<D:resourcetype/>
|
||||||
|
<D:getcontenttype/>
|
||||||
|
<D:getcontentlength/>
|
||||||
|
<D:getlastmodified/>
|
||||||
|
</D:prop>
|
||||||
|
</D:propfind>"""
|
||||||
|
|
||||||
|
|
||||||
|
async def list_folder(relative_path: str) -> List[dict]:
    """
    List a folder's immediate children (PROPFIND, Depth:1).
    `relative_path` is relative to nextcloud_base_path; a missing folder yields [].
    """
    client = _get_client()
    resp = await client.request(
        "PROPFIND",
        _full_url(relative_path),
        auth=_auth(),
        headers={"Depth": "1", "Content-Type": "application/xml"},
        content=_PROPFIND_BODY,
    )
    if resp.status_code == 404:
        return []
    if resp.status_code not in (207, 200):
        raise HTTPException(status_code=502, detail=f"Nextcloud PROPFIND failed: {resp.status_code}")
    return _parse_propfind(resp.content, relative_path)
|
||||||
|
|
||||||
|
|
||||||
|
async def list_folder_recursive(relative_path: str) -> List[dict]:
    """
    Recursively list ALL files under a folder (any depth).
    Tries Depth:infinity first (single call). Falls back to manual recursion
    via Depth:1 if the server returns 403/400 (some servers disable infinity).
    Returns only file entries (is_dir=False).
    """
    client = _get_client()
    resp = await client.request(
        "PROPFIND",
        _full_url(relative_path),
        auth=_auth(),
        headers={"Depth": "infinity", "Content-Type": "application/xml"},
        content=_PROPFIND_BODY,
    )

    status = resp.status_code
    if status in (207, 200):
        entries = _parse_propfind(resp.content, relative_path)
        return [entry for entry in entries if not entry["is_dir"]]

    # Depth:infinity not supported — fall back to recursive Depth:1
    if status in (403, 400, 412):
        return await _list_recursive_fallback(relative_path)

    if status == 404:
        return []

    raise HTTPException(status_code=502, detail=f"Nextcloud PROPFIND failed: {status}")
|
||||||
|
|
||||||
|
|
||||||
|
async def _list_recursive_fallback(relative_path: str) -> List[dict]:
    """Manually recurse via Depth:1 calls when Depth:infinity is blocked."""
    collected: List[dict] = []
    subdirs: List[str] = []
    for entry in await list_folder(relative_path):
        if entry["is_dir"]:
            subdirs.append(entry["path"])
        else:
            collected.append(entry)
    # Depth-first, one folder at a time (sequential awaits, as in the rest of
    # this module's request pattern).
    for sub in subdirs:
        collected.extend(await _list_recursive_fallback(sub))
    return collected
|
||||||
|
|
||||||
|
|
||||||
|
async def upload_file(relative_path: str, content: bytes, mime_type: str) -> str:
    """
    PUT a file to Nextcloud. Returns the relative_path on success.
    relative_path includes filename, e.g. "customers/abc123/media/photo.jpg"

    Raises HTTPException 502 when the server rejects the PUT.
    """
    client = _get_client()
    resp = await client.put(
        _full_url(relative_path),
        auth=_auth(),
        content=content,
        headers={"Content-Type": mime_type},
    )
    if resp.status_code in (200, 201, 204):
        return relative_path
    raise HTTPException(status_code=502, detail=f"Nextcloud upload failed: {resp.status_code}")
|
||||||
|
|
||||||
|
|
||||||
|
async def download_file(relative_path: str) -> tuple[bytes, str]:
    """
    GET a file from Nextcloud. Returns (bytes, mime_type).

    Raises 404 when the file is missing, 502 on any other failure.
    """
    client = _get_client()
    resp = await client.get(_full_url(relative_path), auth=_auth())
    if resp.status_code == 404:
        raise HTTPException(status_code=404, detail="File not found in Nextcloud")
    if resp.status_code != 200:
        raise HTTPException(status_code=502, detail=f"Nextcloud download failed: {resp.status_code}")
    # Keep only the bare mime type; drop any "; charset=..." suffix.
    content_type = resp.headers.get("content-type", "application/octet-stream")
    mime = content_type.split(";")[0].strip()
    return resp.content, mime
|
||||||
|
|
||||||
|
|
||||||
|
async def delete_file(relative_path: str) -> None:
    """DELETE a file from Nextcloud; a 404 (already gone) is not an error."""
    client = _get_client()
    resp = await client.request("DELETE", _full_url(relative_path), auth=_auth())
    if resp.status_code not in (200, 204, 404):
        raise HTTPException(status_code=502, detail=f"Nextcloud delete failed: {resp.status_code}")
|
||||||
|
|
||||||
|
|
||||||
|
async def rename_folder(old_relative_path: str, new_relative_path: str) -> None:
    """Rename/move a folder in Nextcloud using WebDAV MOVE (Overwrite: F — no clobber)."""
    source = _full_url(old_relative_path)
    target = _full_url(new_relative_path)
    client = _get_client()
    resp = await client.request(
        "MOVE",
        source,
        auth=_auth(),
        headers={"Destination": target, "Overwrite": "F"},
    )
    if resp.status_code not in (201, 204):
        raise HTTPException(status_code=502, detail=f"Nextcloud rename failed: {resp.status_code}")
|
||||||
490
backend/crm/nextcloud_router.py
Normal file
@@ -0,0 +1,490 @@
|
|||||||
|
"""
|
||||||
|
Nextcloud WebDAV proxy endpoints.
|
||||||
|
|
||||||
|
Folder convention (all paths relative to nextcloud_base_path = BellSystems/Console):
|
||||||
|
customers/{folder_id}/media/
|
||||||
|
customers/{folder_id}/documents/
|
||||||
|
customers/{folder_id}/sent/
|
||||||
|
customers/{folder_id}/received/
|
||||||
|
|
||||||
|
folder_id = customer.folder_id if set, else customer.id (legacy fallback).
|
||||||
|
"""
|
||||||
|
from fastapi import APIRouter, Depends, Query, UploadFile, File, Form, Response, HTTPException, Request
|
||||||
|
from fastapi.responses import StreamingResponse
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from jose import JWTError
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from auth.utils import decode_access_token
|
||||||
|
from crm import nextcloud, service
|
||||||
|
from config import settings
|
||||||
|
from crm.models import MediaCreate, MediaDirection
|
||||||
|
from crm.thumbnails import generate_thumbnail
|
||||||
|
|
||||||
|
# All Nextcloud proxy endpoints live under /api/crm/nextcloud.
router = APIRouter(prefix="/api/crm/nextcloud", tags=["crm-nextcloud"])

# Maps a customer subfolder name to the MediaDirection recorded on a media doc.
# Both "media" and "documents" are treated as internal.
DIRECTION_MAP = {
    "sent": MediaDirection.sent,
    "received": MediaDirection.received,
    "internal": MediaDirection.internal,
    "media": MediaDirection.internal,
    "documents": MediaDirection.internal,
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/web-url")
async def get_web_url(
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """
    Return the Nextcloud Files web-UI URL for a given file path.
    Opens the parent folder with the file highlighted (via `scrollto`).

    Raises HTTPException 503 when Nextcloud is not configured.
    """
    # Fix: the previous version also imported `quote` here but never used it.
    from urllib.parse import urlencode

    if not settings.nextcloud_url:
        raise HTTPException(status_code=503, detail="Nextcloud not configured")
    base = settings.nextcloud_base_path.strip("/")
    # path is relative to base, e.g. "customers/abc/media/photo.jpg"
    parts = path.rsplit("/", 1)
    folder_rel = parts[0] if len(parts) == 2 else ""
    filename = parts[-1]
    nc_dir = f"/{base}/{folder_rel}" if folder_rel else f"/{base}"
    # urlencode percent-escapes both the folder path and the filename.
    qs = urlencode({"dir": nc_dir, "scrollto": filename})
    url = f"{settings.nextcloud_url.rstrip('/')}/index.php/apps/files/?{qs}"
    return {"url": url}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/browse")
async def browse(
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List immediate children of a Nextcloud folder."""
    children = await nextcloud.list_folder(path)
    return {"path": path, "items": children}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/browse-all")
async def browse_all(
    customer_id: str = Query(..., description="Customer ID"),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """
    Recursively list ALL files for a customer across all subfolders and any depth.
    Uses Depth:infinity (one WebDAV call) with automatic fallback to recursive Depth:1.
    Each file item includes a 'subfolder' key derived from its path.
    """
    # NOTE(review): this call is not awaited, while sibling routers await their
    # service calls (e.g. `await service.list_media(...)`) — confirm
    # service.get_customer is synchronous.
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"

    all_files = await nextcloud.list_folder_recursive(base)

    # Exclude _info.txt stubs — human-readable only, should never appear in the UI.
    # .thumbs/ files are kept: the frontend needs them to build the thumbnail map
    # (it already filters them out of the visible file list itself).
    all_files = [
        f for f in all_files
        if not f["path"].endswith("/_info.txt")
    ]

    # Tag each file with the top-level subfolder it lives under
    for item in all_files:
        parts = item["path"].split("/")
        # path looks like: customers/{nc_path}/{subfolder}/[...]/filename
        # parts[0]=customers, parts[1]={nc_path}, parts[2]={subfolder}
        item["subfolder"] = parts[2] if len(parts) > 2 else "other"

    return {"items": all_files}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/file")
async def proxy_file(
    request: Request,
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    token: Optional[str] = Query(None, description="JWT token for browser-native requests (img src, video src, a href) that cannot send an Authorization header"),
):
    """
    Stream a file from Nextcloud through the backend (proxy).

    Supports HTTP Range requests so videos can be seeked and start playing
    immediately.  Auth is accepted via the ?token= query param.
    NOTE(review): only the query-param token is actually checked below — no
    Authorization: Bearer header fallback is implemented; confirm intended.
    """
    # Reject unauthenticated requests before touching Nextcloud at all.
    if token is None:
        raise HTTPException(status_code=403, detail="Not authenticated")
    try:
        decode_access_token(token)
    except (JWTError, KeyError):
        raise HTTPException(status_code=403, detail="Invalid token")

    # Forward the Range header to Nextcloud so we get a true partial response
    # without buffering the whole file into memory.
    nc_url = nextcloud._full_url(path)
    nc_auth = nextcloud._auth()
    forward_headers = {}
    range_header = request.headers.get("range")
    if range_header:
        forward_headers["Range"] = range_header

    import httpx as _httpx

    # Use a dedicated streaming client — httpx.stream() keeps the connection open
    # for the lifetime of the generator, so we can't reuse the shared persistent client.
    # We enter the stream context here to get headers immediately (no body buffering),
    # then hand the body iterator to StreamingResponse.
    stream_client = _httpx.AsyncClient(timeout=None, follow_redirects=True)
    nc_resp_ctx = stream_client.stream("GET", nc_url, auth=nc_auth, headers=forward_headers)
    nc_resp = await nc_resp_ctx.__aenter__()

    # Error paths must close the context AND the client explicitly: the
    # generator's finally-cleanup below only runs on the success path.
    if nc_resp.status_code == 404:
        await nc_resp_ctx.__aexit__(None, None, None)
        await stream_client.aclose()
        raise HTTPException(status_code=404, detail="File not found in Nextcloud")
    if nc_resp.status_code not in (200, 206):
        await nc_resp_ctx.__aexit__(None, None, None)
        await stream_client.aclose()
        raise HTTPException(status_code=502, detail=f"Nextcloud returned {nc_resp.status_code}")

    # Strip any "; charset=..." suffix; pass the bare MIME type through.
    mime_type = nc_resp.headers.get("content-type", "application/octet-stream").split(";")[0].strip()

    # Propagate range-related headers so the browser can seek.
    resp_headers = {"Accept-Ranges": "bytes"}
    for h in ("content-range", "content-length"):
        if h in nc_resp.headers:
            resp_headers[h.title()] = nc_resp.headers[h]

    async def _stream():
        # The generator owns the stream's lifetime: close the httpx context and
        # client when the body is fully sent or the generator is finalized
        # (e.g. client disconnect).
        try:
            async for chunk in nc_resp.aiter_bytes(chunk_size=64 * 1024):
                yield chunk
        finally:
            await nc_resp_ctx.__aexit__(None, None, None)
            await stream_client.aclose()

    # Mirror Nextcloud's status (200 full / 206 partial) to the browser.
    return StreamingResponse(
        _stream(),
        status_code=nc_resp.status_code,
        media_type=mime_type,
        headers=resp_headers,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/file-put")
async def put_file(
    request: Request,
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    token: Optional[str] = Query(None),
):
    """
    Overwrite a Nextcloud file with the raw request body (in-browser TXT editing).

    Auth via the ?token= query param, mirroring the /file GET proxy.
    """
    if token is None:
        raise HTTPException(status_code=403, detail="Not authenticated")
    try:
        decode_access_token(token)
    except (JWTError, KeyError):
        raise HTTPException(status_code=403, detail="Invalid token")

    payload = await request.body()
    mime = request.headers.get("content-type", "text/plain")
    await nextcloud.upload_file(path, payload, mime)
    return {"updated": path}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/upload")
async def upload_file(
    file: UploadFile = File(...),
    customer_id: str = Form(...),
    subfolder: str = Form("media"),  # "media" | "documents" | "sent" | "received"
    direction: Optional[str] = Form(None),
    tags: Optional[str] = Form(None),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Upload a file to the customer's Nextcloud folder and record it in crm_media.

    Uses the customer's folder_id as the NC path (falls back to UUID for legacy
    records).  A JPEG thumbnail is generated best-effort into the subfolder's
    .thumbs/ directory; thumbnail failures are logged, never fatal.
    """
    import logging

    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)

    target_folder = f"customers/{nc_path}/{subfolder}"
    file_path = f"{target_folder}/{file.filename}"

    # Ensure the target subfolder exists (idempotent, fast for existing folders).
    await nextcloud.ensure_folder(target_folder)

    # Read and upload the file body.
    content = await file.read()
    mime_type = file.content_type or "application/octet-stream"
    await nextcloud.upload_file(file_path, content, mime_type)

    # Generate and upload thumbnail (best-effort).  Always stored as {stem}.jpg
    # regardless of source extension so the thumb filename is unambiguous and
    # the existence check can never false-positive.
    thumb_path = None
    try:
        thumb_bytes = generate_thumbnail(content, mime_type, file.filename)
        if thumb_bytes:
            thumb_folder = f"{target_folder}/.thumbs"
            stem = file.filename.rsplit(".", 1)[0] if "." in file.filename else file.filename
            thumb_nc_path = f"{thumb_folder}/{stem}.jpg"
            await nextcloud.ensure_folder(thumb_folder)
            await nextcloud.upload_file(thumb_nc_path, thumb_bytes, "image/jpeg")
            thumb_path = thumb_nc_path
    except Exception as e:
        logging.getLogger(__name__).warning("Thumbnail generation failed for %s: %s", file.filename, e)

    # Resolve direction: an explicit, valid value wins; anything else falls
    # back to the subfolder mapping (single fallback instead of duplicated branches).
    resolved_direction = None
    if direction:
        try:
            resolved_direction = MediaDirection(direction)
        except ValueError:
            resolved_direction = None
    if resolved_direction is None:
        resolved_direction = DIRECTION_MAP.get(subfolder, MediaDirection.internal)

    # Fix: drop empty tags produced by stray commas ("a,,b" / trailing comma)
    # so no empty-string tags get persisted.
    tag_list = [t.strip() for t in tags.split(",") if t.strip()] if tags else []
    media_record = await service.create_media(MediaCreate(
        customer_id=customer_id,
        filename=file.filename,
        nextcloud_path=file_path,
        mime_type=mime_type,
        direction=resolved_direction,
        tags=tag_list,
        uploaded_by=_user.name,
        thumbnail_path=thumb_path,
    ))

    return media_record
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/file")
async def delete_file(
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a file from Nextcloud and remove the matching crm_media record if found."""
    await nextcloud.delete_file(path)

    # Best-effort: drop the first DB record tracking this path, if any.
    media_list = await service.list_media()
    match = next((m for m in media_list if m.nextcloud_path == path), None)
    if match is not None:
        try:
            await service.delete_media(match.id)
        except Exception:
            pass

    return {"deleted": path}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/init-customer-folder")
async def init_customer_folder(
    customer_id: str = Form(...),
    customer_name: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Create the standard Nextcloud folder layout for a customer and write an
    _info.txt stub so the folder is identifiable when browsing Nextcloud directly.
    """
    customer = service.get_customer(customer_id)
    root = f"customers/{service.get_customer_nc_path(customer)}"
    for name in ("media", "documents", "sent", "received"):
        await nextcloud.ensure_folder(f"{root}/{name}")
    await nextcloud.write_info_file(root, customer_name, customer_id)
    return {"initialized": root}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/sync")
async def sync_nextcloud_files(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Scan the customer's Nextcloud folder and register any files not yet tracked
    in the DB.  Returns counts of newly synced and skipped (already tracked) files.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    root = f"customers/{nc_path}"

    # All NC files at any depth, minus internal artifacts (.thumbs/, _info.txt).
    candidates = [
        entry for entry in await nextcloud.list_folder_recursive(root)
        if "/.thumbs/" not in entry["path"] and not entry["path"].endswith("/_info.txt")
    ]
    # Path shape: customers/{nc_path}/{subfolder}/...
    for entry in candidates:
        segments = entry["path"].split("/")
        entry["_subfolder"] = segments[2] if len(segments) > 2 else "media"

    # Existing DB records for this customer → set of tracked paths for O(1) lookups.
    existing = await service.list_media(customer_id=customer_id)
    tracked_paths = {m.nextcloud_path for m in existing}

    synced = 0
    skipped = 0
    for entry in candidates:
        if entry["path"] in tracked_paths:
            skipped += 1
            continue
        await service.create_media(MediaCreate(
            customer_id=customer_id,
            filename=entry["filename"],
            nextcloud_path=entry["path"],
            mime_type=entry.get("mime_type") or "application/octet-stream",
            direction=DIRECTION_MAP.get(entry["_subfolder"], MediaDirection.internal),
            tags=[],
            uploaded_by="nextcloud-sync",
        ))
        synced += 1

    return {"synced": synced, "skipped": skipped}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/generate-thumbs")
async def generate_thumbs(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Generate missing thumbnails for every customer file in Nextcloud.

    Scans all files, skipping .thumbs/ contents themselves and any file whose
    thumbnail already exists in the sibling .thumbs/ folder.  File types that
    can't be thumbnailed count as skipped.  Returns counts of generated,
    skipped (already exists / unsupported), and failed files.
    """
    import logging
    # Fix: the logger was re-imported and re-created inside the per-file except
    # block on every failure; hoist it out of the loop.
    logger = logging.getLogger(__name__)

    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"

    all_nc_files = await nextcloud.list_folder_recursive(base)

    # Existing thumb paths, for O(1) "already done" checks.
    existing_thumbs = {f["path"] for f in all_nc_files if "/.thumbs/" in f["path"]}
    # Only process real files (not the thumbs themselves).
    candidates = [f for f in all_nc_files if "/.thumbs/" not in f["path"]]

    generated = 0
    skipped = 0
    failed = 0

    for f in candidates:
        # Derive where the thumb would live; thumbs are always {stem}.jpg.
        path = f["path"]  # e.g. customers/{nc_path}/{subfolder}/photo.jpg
        parts = path.rsplit("/", 1)
        if len(parts) != 2:
            skipped += 1
            continue
        parent_folder, filename = parts
        stem = filename.rsplit(".", 1)[0] if "." in filename else filename
        thumb_nc_path = f"{parent_folder}/.thumbs/{stem}.jpg"

        if thumb_nc_path in existing_thumbs:
            skipped += 1
            continue

        # Download the file, generate the thumb, upload (best-effort per file).
        try:
            content, mime_type = await nextcloud.download_file(path)
            thumb_bytes = generate_thumbnail(content, mime_type, filename)
            if not thumb_bytes:
                skipped += 1  # unsupported file type
                continue
            await nextcloud.ensure_folder(f"{parent_folder}/.thumbs")
            await nextcloud.upload_file(thumb_nc_path, thumb_bytes, "image/jpeg")
            generated += 1
        except Exception as e:
            logger.warning("Thumb gen failed for %s: %s", path, e)
            failed += 1

    return {"generated": generated, "skipped": skipped, "failed": failed}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/clear-thumbs")
async def clear_thumbs(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Delete all .thumbs sub-folders for a customer across all subfolders so
    thumbnails can be regenerated from scratch.  Returns the number of
    .thumbs folders deleted.
    """
    import logging
    # Fix: the logger was re-imported inside the per-folder except block on
    # every failure; hoist it out of the loop.
    logger = logging.getLogger(__name__)

    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"

    all_nc_files = await nextcloud.list_folder_recursive(base)

    # Collect unique .thumbs folder paths from the recursive file listing.
    thumb_folders = set()
    for f in all_nc_files:
        if "/.thumbs/" in f["path"]:
            thumb_folders.add(f["path"].split("/.thumbs/")[0] + "/.thumbs")

    deleted = 0
    for folder in thumb_folders:
        try:
            await nextcloud.delete_file(folder)
            deleted += 1
        except Exception as e:
            logger.warning("Failed to delete .thumbs folder %s: %s", folder, e)

    return {"deleted_folders": deleted}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/untrack-deleted")
async def untrack_deleted_files(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Remove crm_media records whose Nextcloud file no longer exists, plus any
    records pointing at internal files (.thumbs/, _info.txt) that should never
    have been tracked in the first place.  Returns the count of removed records.
    """
    import logging
    logger = logging.getLogger(__name__)

    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"

    # All current NC file paths recursively (excluding thumbs and the info stub).
    all_nc_files = await nextcloud.list_folder_recursive(base)
    nc_paths = {
        item["path"] for item in all_nc_files
        if "/.thumbs/" not in item["path"] and not item["path"].endswith("/_info.txt")
    }

    existing = await service.list_media(customer_id=customer_id)
    untracked = 0
    for m in existing:
        is_internal = m.nextcloud_path and (
            "/.thumbs/" in m.nextcloud_path or m.nextcloud_path.endswith("/_info.txt")
        )
        if m.nextcloud_path and (is_internal or m.nextcloud_path not in nc_paths):
            try:
                await service.delete_media(m.id)
                untracked += 1
            except Exception as e:
                # Fix: still best-effort, but no longer a silent swallow — log it.
                logger.warning("Failed to untrack media %s (%s): %s", m.id, m.nextcloud_path, e)

    return {"untracked": untracked}
|
||||||
151
backend/crm/orders_router.py
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
from fastapi import APIRouter, Depends, Query
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from crm.models import OrderCreate, OrderUpdate, OrderInDB, OrderListResponse
|
||||||
|
from crm import service
|
||||||
|
|
||||||
|
# Customer-scoped order endpoints; {customer_id} binds from the path prefix.
router = APIRouter(prefix="/api/crm/customers/{customer_id}/orders", tags=["crm-orders"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", response_model=OrderListResponse)
def list_orders(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Return all orders for one customer, with a total count."""
    found = service.list_orders(customer_id)
    return OrderListResponse(orders=found, total=len(found))
|
||||||
|
|
||||||
|
|
||||||
|
# IMPORTANT: specific sub-paths must come before /{order_id}
@router.get("/next-order-number")
def get_next_order_number(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Return the next globally unique order number (ORD-DDMMYY-NNN across all customers)."""
    # NOTE(review): reaches into the service's private helper (underscore name);
    # consider exposing a public service API for this instead.
    return {"order_number": service._generate_order_number(customer_id)}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/init-negotiations", response_model=OrderInDB, status_code=201)
def init_negotiations(
    customer_id: str,
    body: dict,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create the initial 'negotiations' order for a customer from a loose dict body."""
    fields = {
        "title": body.get("title", ""),
        "note": body.get("note", ""),
        "date": body.get("date"),
        "created_by": body.get("created_by", ""),
    }
    return service.init_negotiations(customer_id=customer_id, **fields)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("", response_model=OrderInDB, status_code=201)
def create_order(
    customer_id: str,
    body: OrderCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a new order for the customer."""
    created = service.create_order(customer_id, body)
    return created
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{order_id}", response_model=OrderInDB)
def get_order(
    customer_id: str,
    order_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch a single order by id for the given customer."""
    order = service.get_order(customer_id, order_id)
    return order
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{order_id}", response_model=OrderInDB)
def update_order(
    customer_id: str,
    order_id: str,
    body: OrderUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Apply a partial update to an existing order."""
    updated = service.update_order(customer_id, order_id, body)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{order_id}", status_code=204)
def delete_order(
    customer_id: str,
    order_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete an order; responds 204 with no body."""
    service.delete_order(customer_id, order_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{order_id}/timeline", response_model=OrderInDB)
def append_timeline_event(
    customer_id: str,
    order_id: str,
    body: dict,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Append an event to the order's timeline; returns the updated order."""
    updated = service.append_timeline_event(customer_id, order_id, body)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{order_id}/timeline/{index}", response_model=OrderInDB)
def update_timeline_event(
    customer_id: str,
    order_id: str,
    index: int,
    body: dict,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Update the timeline event at the given index; returns the updated order."""
    updated = service.update_timeline_event(customer_id, order_id, index, body)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{order_id}/timeline/{index}", response_model=OrderInDB)
def delete_timeline_event(
    customer_id: str,
    order_id: str,
    index: int,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Remove the timeline event at the given index; returns the updated order."""
    updated = service.delete_timeline_event(customer_id, order_id, index)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{order_id}/payment-status", response_model=OrderInDB)
def update_payment_status(
    customer_id: str,
    order_id: str,
    body: dict,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Update the order's payment status from a loose dict body."""
    updated = service.update_order_payment_status(customer_id, order_id, body)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
# ── Global order list (collection group) ─────────────────────────────────────
# Separate router registered at /api/crm/orders for the global OrderList page
# (the main `router` above is customer-scoped).

global_router = APIRouter(prefix="/api/crm/orders", tags=["crm-orders-global"])
|
||||||
|
|
||||||
|
|
||||||
|
@global_router.get("")
def list_all_orders(
    status: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """
    List orders across all customers, optionally filtered by status.

    Each order dict is enriched with customer_name ("name / organization");
    customers that cannot be resolved leave customer_name as None.
    """
    import logging
    logger = logging.getLogger(__name__)

    orders = service.list_all_orders(status=status)

    # Resolve each distinct customer once.  Best-effort, but the failure is
    # now logged instead of silently swallowed (was `except: pass`).
    customer_names: dict[str, str] = {}
    for cid in {o.customer_id for o in orders if o.customer_id}:
        try:
            c = service.get_customer(cid)
            parts = [c.name, c.organization] if c.organization else [c.name]
            customer_names[cid] = " / ".join(filter(None, parts))
        except Exception as e:
            logger.warning("Could not resolve customer %s for order list: %s", cid, e)

    enriched = []
    for o in orders:
        d = o.model_dump()
        d["customer_name"] = customer_names.get(o.customer_id)
        enriched.append(d)
    return {"orders": enriched, "total": len(enriched)}
|
||||||
158
backend/crm/quotation_models.py
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationStatus(str, Enum):
    """Lifecycle state of a quotation."""
    draft = "draft"
    sent = "sent"
    accepted = "accepted"
    rejected = "rejected"
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationItemCreate(BaseModel):
    """Payload for one quotation line item.

    Line totals are not supplied here — see QuotationItemInDB.line_total.
    """
    product_id: Optional[str] = None
    # Free-text description plus optional per-language variants.
    description: Optional[str] = None
    description_en: Optional[str] = None
    description_gr: Optional[str] = None
    unit_type: str = "pcs"  # pcs / kg / m
    unit_cost: float = 0.0
    discount_percent: float = 0.0
    quantity: float = 1.0
    vat_percent: float = 24.0
    # Display position of the item within the quotation.
    sort_order: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationItemInDB(QuotationItemCreate):
    """Stored line item: the create fields plus identity and the computed line total."""
    id: str
    quotation_id: str
    line_total: float = 0.0
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationCreate(BaseModel):
    """Payload for creating a quotation, including its line items and
    per-quotation client overrides.  Totals are computed server-side."""
    customer_id: str
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: str = "en"  # en / gr
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: float = 0.0
    shipping_cost: float = 0.0
    shipping_cost_discount: float = 0.0
    install_cost: float = 0.0
    install_cost_discount: float = 0.0
    extras_label: Optional[str] = None
    extras_cost: float = 0.0
    comments: List[str] = []
    quick_notes: Optional[Dict[str, Any]] = None
    items: List[QuotationItemCreate] = []
    # Client override fields (for this quotation only; customer record is not modified)
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
    # Legacy quotation fields
    is_legacy: bool = False
    legacy_date: Optional[str] = None  # ISO date string, manually set
    legacy_pdf_path: Optional[str] = None  # Nextcloud path to uploaded PDF
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationUpdate(BaseModel):
    """Partial-update payload: every field optional; only provided fields are applied."""
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: Optional[str] = None
    status: Optional[QuotationStatus] = None
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: Optional[float] = None
    shipping_cost: Optional[float] = None
    shipping_cost_discount: Optional[float] = None
    install_cost: Optional[float] = None
    install_cost_discount: Optional[float] = None
    extras_label: Optional[str] = None
    extras_cost: Optional[float] = None
    comments: Optional[List[str]] = None
    quick_notes: Optional[Dict[str, Any]] = None
    # Replaces the full item list when provided.
    items: Optional[List[QuotationItemCreate]] = None
    # Client override fields
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
    # Legacy quotation fields
    is_legacy: Optional[bool] = None
    legacy_date: Optional[str] = None
    legacy_pdf_path: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationInDB(BaseModel):
    """Full stored quotation: identity, all create/update fields, server-computed
    totals, PDF locations, and timestamps (ISO strings)."""
    id: str
    quotation_number: str
    customer_id: str
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: str = "en"
    status: QuotationStatus = QuotationStatus.draft
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: float = 0.0
    shipping_cost: float = 0.0
    shipping_cost_discount: float = 0.0
    install_cost: float = 0.0
    install_cost_discount: float = 0.0
    extras_label: Optional[str] = None
    extras_cost: float = 0.0
    comments: List[str] = []
    quick_notes: Dict[str, Any] = {}
    # Server-computed totals.
    subtotal_before_discount: float = 0.0
    global_discount_amount: float = 0.0
    new_subtotal: float = 0.0
    vat_amount: float = 0.0
    final_total: float = 0.0
    # Generated-PDF location (when a PDF has been produced).
    nextcloud_pdf_path: Optional[str] = None
    nextcloud_pdf_url: Optional[str] = None
    created_at: str
    updated_at: str
    items: List[QuotationItemInDB] = []
    # Client override fields
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
    # Legacy quotation fields
    is_legacy: bool = False
    legacy_date: Optional[str] = None
    legacy_pdf_path: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationListItem(BaseModel):
    """Compact quotation summary for list views (no items or cost breakdown)."""
    id: str
    quotation_number: str
    title: Optional[str] = None
    customer_id: str
    status: QuotationStatus
    final_total: float
    created_at: str
    updated_at: str
    nextcloud_pdf_url: Optional[str] = None
    is_legacy: bool = False
    legacy_date: Optional[str] = None
    legacy_pdf_path: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class QuotationListResponse(BaseModel):
    """List-endpoint envelope: the quotation summaries plus their total count."""
    quotations: List[QuotationListItem]
    total: int
|
||||||
|
|
||||||
|
|
||||||
|
class NextNumberResponse(BaseModel):
    """Response for the next-number preview endpoint."""
    next_number: str
|
||||||
121
backend/crm/quotations_router.py
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
from fastapi import APIRouter, Depends, Query, UploadFile, File
|
||||||
|
from fastapi.responses import StreamingResponse
|
||||||
|
from typing import Optional
|
||||||
|
import io
|
||||||
|
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from crm.quotation_models import (
|
||||||
|
NextNumberResponse,
|
||||||
|
QuotationCreate,
|
||||||
|
QuotationInDB,
|
||||||
|
QuotationListResponse,
|
||||||
|
QuotationUpdate,
|
||||||
|
)
|
||||||
|
from crm import quotations_service as svc
|
||||||
|
|
||||||
|
# Quotation endpoints; the customer-scoped list lives under /customer/{customer_id}.
router = APIRouter(prefix="/api/crm/quotations", tags=["crm-quotations"])
|
||||||
|
|
||||||
|
|
||||||
|
# IMPORTANT: Static paths must come BEFORE /{id} to avoid route collision in FastAPI

@router.get("/next-number", response_model=NextNumberResponse)
async def get_next_number(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Preview the next available quotation number (does not commit it)."""
    upcoming = await svc.get_next_number()
    return NextNumberResponse(next_number=upcoming)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/all", response_model=list[dict])
async def list_all_quotations(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Returns all quotations across all customers, each including customer_name."""
    # Returns raw dicts (not QuotationListItem) because customer_name is an
    # extra field injected by the service layer.
    return await svc.list_all_quotations()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/customer/{customer_id}", response_model=QuotationListResponse)
async def list_quotations_for_customer(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List all quotations belonging to a single customer, newest first."""
    quotations = await svc.list_quotations(customer_id)
    return QuotationListResponse(quotations=quotations, total=len(quotations))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{quotation_id}/pdf")
async def proxy_quotation_pdf(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Proxy the quotation PDF from Nextcloud to bypass browser cookie restrictions."""
    # Whole file is buffered in memory before streaming; fine for typical
    # quotation-sized PDFs.
    pdf_bytes = await svc.get_quotation_pdf_bytes(quotation_id)
    return StreamingResponse(
        io.BytesIO(pdf_bytes),
        media_type="application/pdf",
        headers={"Content-Disposition": "inline"},  # render in-browser, don't force download
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{quotation_id}", response_model=QuotationInDB)
async def get_quotation(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch a single quotation with its line items; 404 if it does not exist."""
    return await svc.get_quotation(quotation_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("", response_model=QuotationInDB, status_code=201)
async def create_quotation(
    body: QuotationCreate,
    generate_pdf: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Create a quotation. Pass ?generate_pdf=true to immediately generate and upload the PDF.

    The quotation number is assigned server-side; totals are recalculated from
    the submitted items, never trusted from the client.
    """
    return await svc.create_quotation(body, generate_pdf=generate_pdf)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{quotation_id}", response_model=QuotationInDB)
async def update_quotation(
    quotation_id: str,
    body: QuotationUpdate,
    generate_pdf: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Update a quotation. Pass ?generate_pdf=true to regenerate the PDF.

    Partial update: only fields present in the body are changed; totals are
    recalculated server-side.
    """
    return await svc.update_quotation(quotation_id, body, generate_pdf=generate_pdf)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{quotation_id}", status_code=204)
async def delete_quotation(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a quotation, its line items, and (best-effort) its Nextcloud PDF."""
    await svc.delete_quotation(quotation_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{quotation_id}/regenerate-pdf", response_model=QuotationInDB)
async def regenerate_pdf(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Force PDF regeneration and re-upload to Nextcloud."""
    return await svc.regenerate_pdf(quotation_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{quotation_id}/legacy-pdf", response_model=QuotationInDB)
async def upload_legacy_pdf(
    quotation_id: str,
    file: UploadFile = File(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Upload a PDF file for a legacy quotation and store its Nextcloud path."""
    # NOTE(review): the file's content type / magic bytes are not validated
    # here — any upload is forwarded to the service layer as-is.
    pdf_bytes = await file.read()
    filename = file.filename or f"legacy-{quotation_id}.pdf"
    return await svc.upload_legacy_pdf(quotation_id, pdf_bytes, filename)
|
||||||
558
backend/crm/quotations_service.py
Normal file
@@ -0,0 +1,558 @@
|
|||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
from decimal import Decimal, ROUND_HALF_UP
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from crm import nextcloud
|
||||||
|
from crm.quotation_models import (
|
||||||
|
QuotationCreate,
|
||||||
|
QuotationInDB,
|
||||||
|
QuotationItemCreate,
|
||||||
|
QuotationItemInDB,
|
||||||
|
QuotationListItem,
|
||||||
|
QuotationUpdate,
|
||||||
|
)
|
||||||
|
from crm.service import get_customer
|
||||||
|
import database as mqtt_db
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)

# Path to Jinja2 templates directory (relative to this file)
# i.e. <backend root>/templates — used by the WeasyPrint PDF renderer below.
_TEMPLATES_DIR = Path(__file__).parent.parent / "templates"
|
||||||
|
|
||||||
|
|
||||||
|
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
def _d(value) -> Decimal:
|
||||||
|
"""Convert to Decimal safely."""
|
||||||
|
return Decimal(str(value if value is not None else 0))
|
||||||
|
|
||||||
|
|
||||||
|
def _float(d: Decimal) -> float:
|
||||||
|
"""Round Decimal to 2dp and return as float for storage."""
|
||||||
|
return float(d.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))
|
||||||
|
|
||||||
|
|
||||||
|
def _calculate_totals(
    items: list,
    global_discount_percent: float,
    shipping_cost: float,
    shipping_cost_discount: float,
    install_cost: float,
    install_cost_discount: float,
    extras_cost: float,
) -> dict:
    """
    Calculate all monetary totals using Decimal arithmetic (ROUND_HALF_UP).
    VAT is computed per-item from each item's vat_percent field.
    Shipping and install costs carry 0% VAT.
    Returns a dict of floats ready for DB storage.

    Keys returned: subtotal_before_discount, global_discount_amount,
    new_subtotal, vat_amount, final_total. Rounding to 2dp happens only at
    the very end (in _float); intermediate math stays exact.
    """
    # Per-line totals and per-item VAT
    item_totals = []
    item_vat = Decimal(0)
    for item in items:
        cost = _d(item.get("unit_cost", 0))
        qty = _d(item.get("quantity", 1))
        disc = _d(item.get("discount_percent", 0))
        # Net line value after the per-line percentage discount.
        net = cost * qty * (1 - disc / 100)
        item_totals.append(net)
        vat_pct = _d(item.get("vat_percent", 24))  # 24 = default Greek VAT rate
        item_vat += net * (vat_pct / 100)

    # Shipping net (VAT = 0%)
    ship_gross = _d(shipping_cost)
    ship_disc = _d(shipping_cost_discount)
    ship_net = ship_gross * (1 - ship_disc / 100)

    # Install net (VAT = 0%)
    install_gross = _d(install_cost)
    install_disc = _d(install_cost_discount)
    install_net = install_gross * (1 - install_disc / 100)

    subtotal = sum(item_totals, Decimal(0)) + ship_net + install_net

    global_disc_pct = _d(global_discount_percent)
    global_disc_amount = subtotal * (global_disc_pct / 100)
    new_subtotal = subtotal - global_disc_amount

    # Global discount proportionally reduces VAT too
    # (ratio is 1 when no global discount is applied).
    if subtotal > 0:
        disc_ratio = new_subtotal / subtotal
        vat_amount = item_vat * disc_ratio
    else:
        vat_amount = Decimal(0)

    # Extras are added after VAT — they are treated as a flat, VAT-free amount.
    extras = _d(extras_cost)
    final_total = new_subtotal + vat_amount + extras

    return {
        "subtotal_before_discount": _float(subtotal),
        "global_discount_amount": _float(global_disc_amount),
        "new_subtotal": _float(new_subtotal),
        "vat_amount": _float(vat_amount),
        "final_total": _float(final_total),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def _calc_line_total(item) -> float:
    """Net line total: unit_cost × quantity, reduced by the line discount, 2dp."""
    gross = _d(item.get("unit_cost", 0)) * _d(item.get("quantity", 1))
    discount_factor = 1 - _d(item.get("discount_percent", 0)) / 100
    return _float(gross * discount_factor)
|
||||||
|
|
||||||
|
|
||||||
|
async def _generate_quotation_number(db) -> str:
|
||||||
|
year = datetime.utcnow().year
|
||||||
|
prefix = f"QT-{year}-"
|
||||||
|
rows = await db.execute_fetchall(
|
||||||
|
"SELECT quotation_number FROM crm_quotations WHERE quotation_number LIKE ? ORDER BY quotation_number DESC LIMIT 1",
|
||||||
|
(f"{prefix}%",),
|
||||||
|
)
|
||||||
|
if rows:
|
||||||
|
last_num = rows[0][0] # e.g. "QT-2026-012"
|
||||||
|
try:
|
||||||
|
seq = int(last_num[len(prefix):]) + 1
|
||||||
|
except ValueError:
|
||||||
|
seq = 1
|
||||||
|
else:
|
||||||
|
seq = 1
|
||||||
|
return f"{prefix}{seq:03d}"
|
||||||
|
|
||||||
|
|
||||||
|
def _row_to_quotation(row: dict, items: list[dict]) -> QuotationInDB:
    """Hydrate a quotation DB row plus its item rows into a QuotationInDB model."""
    data = dict(row)
    # JSON-encoded text columns; NULL/empty decodes to [] / {}.
    data["comments"] = json.loads(data.get("comments") or "[]")
    data["quick_notes"] = json.loads(data.get("quick_notes") or "{}")

    def _pick(source: dict, model) -> dict:
        # Keep only the keys the pydantic model actually declares.
        return {key: val for key, val in source.items() if key in model.model_fields}

    item_models = [QuotationItemInDB(**_pick(entry, QuotationItemInDB)) for entry in items]
    return QuotationInDB(**_pick(data, QuotationInDB), items=item_models)
|
||||||
|
|
||||||
|
|
||||||
|
def _row_to_list_item(row: dict) -> QuotationListItem:
    """Project a quotation DB row onto the QuotationListItem summary model."""
    allowed = QuotationListItem.model_fields
    filtered = {key: val for key, val in dict(row).items() if key in allowed}
    return QuotationListItem(**filtered)
|
||||||
|
|
||||||
|
|
||||||
|
async def _fetch_items(db, quotation_id: str) -> list[dict]:
|
||||||
|
rows = await db.execute_fetchall(
|
||||||
|
"SELECT * FROM crm_quotation_items WHERE quotation_id = ? ORDER BY sort_order ASC",
|
||||||
|
(quotation_id,),
|
||||||
|
)
|
||||||
|
return [dict(r) for r in rows]
|
||||||
|
|
||||||
|
|
||||||
|
# ── Public API ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async def get_next_number() -> str:
    """Preview the next quotation number (nothing is reserved in the DB)."""
    db = await mqtt_db.get_db()
    return await _generate_quotation_number(db)
|
||||||
|
|
||||||
|
|
||||||
|
async def list_all_quotations() -> list[dict]:
    """Return all quotations across all customers, with customer_name injected."""
    from shared.firebase import get_db as get_firestore
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT id, quotation_number, title, customer_id, status, final_total, created_at, updated_at, "
        "nextcloud_pdf_url, is_legacy, legacy_date, legacy_pdf_path "
        "FROM crm_quotations ORDER BY created_at DESC",
        (),
    )
    items = [dict(r) for r in rows]
    # Fetch unique customer names from Firestore in one pass
    # NOTE(review): these .get() calls look synchronous and run inside an async
    # handler — one blocking round-trip per unique customer. Verify this is
    # acceptable, or move to a thread/batch fetch if listings get slow.
    customer_ids = {i["customer_id"] for i in items if i.get("customer_id")}
    customer_names: dict[str, str] = {}
    if customer_ids:
        fstore = get_firestore()
        for cid in customer_ids:
            try:
                doc = fstore.collection("crm_customers").document(cid).get()
                if doc.exists:
                    d = doc.to_dict()
                    # Display label: "name surname organization", skipping blanks.
                    parts = [d.get("name", ""), d.get("surname", ""), d.get("organization", "")]
                    label = " ".join(p for p in parts if p).strip()
                    customer_names[cid] = label or cid
            except Exception:
                # Best-effort: fall back to the raw customer id as the label.
                customer_names[cid] = cid
    for item in items:
        # Missing/unknown customers get an empty display name.
        item["customer_name"] = customer_names.get(item["customer_id"], "")
    return items
|
||||||
|
|
||||||
|
|
||||||
|
async def list_quotations(customer_id: str) -> list[QuotationListItem]:
    """Return the summary rows for one customer's quotations, newest first."""
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT id, quotation_number, title, customer_id, status, final_total, created_at, updated_at, "
        "nextcloud_pdf_url, is_legacy, legacy_date, legacy_pdf_path "
        "FROM crm_quotations WHERE customer_id = ? ORDER BY created_at DESC",
        (customer_id,),
    )
    return [_row_to_list_item(dict(r)) for r in rows]
|
||||||
|
|
||||||
|
|
||||||
|
async def get_quotation(quotation_id: str) -> QuotationInDB:
    """Fetch one quotation with its line items.

    Raises:
        HTTPException(404): no quotation with this id exists.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")
    items = await _fetch_items(db, quotation_id)
    return _row_to_quotation(dict(rows[0]), items)
|
||||||
|
|
||||||
|
|
||||||
|
async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) -> QuotationInDB:
    """Insert a new quotation plus its line items, then optionally generate its PDF.

    The quotation number is assigned here (not taken from the client), totals
    are recalculated server-side, and the status column is hard-coded to
    'draft' in the INSERT. Returns the freshly persisted record.

    NOTE(review): the number is generated and then inserted without a unique
    constraint visible here — two concurrent creates could race to the same
    number; confirm the schema enforces uniqueness.
    """
    db = await mqtt_db.get_db()
    now = datetime.utcnow().isoformat()
    qid = str(uuid.uuid4())
    quotation_number = await _generate_quotation_number(db)

    # Build items list for calculation
    items_raw = [item.model_dump() for item in data.items]

    # Calculate per-item line totals
    for item in items_raw:
        item["line_total"] = _calc_line_total(item)

    totals = _calculate_totals(
        items_raw,
        data.global_discount_percent,
        data.shipping_cost,
        data.shipping_cost_discount,
        data.install_cost,
        data.install_cost_discount,
        data.extras_cost,
    )

    # comments / quick_notes are stored as JSON text columns.
    comments_json = json.dumps(data.comments)
    quick_notes_json = json.dumps(data.quick_notes or {})

    await db.execute(
        """INSERT INTO crm_quotations (
            id, quotation_number, title, subtitle, customer_id,
            language, status, order_type, shipping_method, estimated_shipping_date,
            global_discount_label, global_discount_percent,
            shipping_cost, shipping_cost_discount, install_cost, install_cost_discount,
            extras_label, extras_cost, comments, quick_notes,
            subtotal_before_discount, global_discount_amount, new_subtotal, vat_amount, final_total,
            nextcloud_pdf_path, nextcloud_pdf_url,
            client_org, client_name, client_location, client_phone, client_email,
            is_legacy, legacy_date, legacy_pdf_path,
            created_at, updated_at
        ) VALUES (
            ?, ?, ?, ?, ?,
            ?, 'draft', ?, ?, ?,
            ?, ?,
            ?, ?, ?, ?,
            ?, ?, ?, ?,
            ?, ?, ?, ?, ?,
            NULL, NULL,
            ?, ?, ?, ?, ?,
            ?, ?, ?,
            ?, ?
        )""",
        (
            qid, quotation_number, data.title, data.subtitle, data.customer_id,
            data.language, data.order_type, data.shipping_method, data.estimated_shipping_date,
            data.global_discount_label, data.global_discount_percent,
            data.shipping_cost, data.shipping_cost_discount, data.install_cost, data.install_cost_discount,
            data.extras_label, data.extras_cost, comments_json, quick_notes_json,
            totals["subtotal_before_discount"], totals["global_discount_amount"],
            totals["new_subtotal"], totals["vat_amount"], totals["final_total"],
            data.client_org, data.client_name, data.client_location, data.client_phone, data.client_email,
            1 if data.is_legacy else 0, data.legacy_date, data.legacy_pdf_path,
            now, now,
        ),
    )

    # Insert items
    for i, item in enumerate(items_raw):
        item_id = str(uuid.uuid4())
        await db.execute(
            """INSERT INTO crm_quotation_items
            (id, quotation_id, product_id, description, description_en, description_gr,
            unit_type, unit_cost, discount_percent, quantity, vat_percent, line_total, sort_order)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            (
                item_id, qid, item.get("product_id"), item.get("description"),
                item.get("description_en"), item.get("description_gr"),
                item.get("unit_type", "pcs"), item.get("unit_cost", 0),
                item.get("discount_percent", 0), item.get("quantity", 1),
                item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
            ),
        )

    # Single commit covers the quotation row and all item rows.
    await db.commit()

    quotation = await get_quotation(qid)

    # Legacy quotations carry an uploaded PDF, so generation is skipped for them.
    if generate_pdf and not data.is_legacy:
        quotation = await _do_generate_and_upload_pdf(quotation)

    return quotation
|
||||||
|
|
||||||
|
|
||||||
|
async def update_quotation(quotation_id: str, data: QuotationUpdate, generate_pdf: bool = False) -> QuotationInDB:
    """Partially update a quotation, recalculate its totals, and optionally regenerate the PDF.

    Only fields present (and non-None) in the update payload are written; if
    ``items`` is supplied, the full item set is replaced. Totals are always
    recomputed from the merged values.

    NOTE(review): model_dump(exclude_none=True) means a field can never be
    explicitly cleared to NULL via this endpoint — confirm that is intended.

    Raises:
        HTTPException(404): no quotation with this id exists.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")

    existing = dict(rows[0])
    now = datetime.utcnow().isoformat()

    # Merge update into existing values
    update_fields = data.model_dump(exclude_none=True)

    # Build SET clause — handle comments JSON separately
    set_parts = []
    params = []

    # Columns that can be written verbatim from the payload.
    scalar_fields = [
        "title", "subtitle", "language", "status", "order_type", "shipping_method",
        "estimated_shipping_date", "global_discount_label", "global_discount_percent",
        "shipping_cost", "shipping_cost_discount", "install_cost",
        "install_cost_discount", "extras_label", "extras_cost",
        "client_org", "client_name", "client_location", "client_phone", "client_email",
        "legacy_date", "legacy_pdf_path",
    ]

    for field in scalar_fields:
        if field in update_fields:
            set_parts.append(f"{field} = ?")
            params.append(update_fields[field])

    if "comments" in update_fields:
        set_parts.append("comments = ?")
        params.append(json.dumps(update_fields["comments"]))

    if "quick_notes" in update_fields:
        set_parts.append("quick_notes = ?")
        params.append(json.dumps(update_fields["quick_notes"] or {}))

    # Recalculate totals using merged values
    merged = {**existing, **{k: update_fields.get(k, existing.get(k)) for k in scalar_fields}}

    # If items are being updated, recalculate with new items; otherwise use existing items
    if "items" in update_fields:
        items_raw = [item.model_dump() for item in data.items]
        for item in items_raw:
            item["line_total"] = _calc_line_total(item)
    else:
        existing_items = await _fetch_items(db, quotation_id)
        items_raw = existing_items

    totals = _calculate_totals(
        items_raw,
        float(merged.get("global_discount_percent", 0)),
        float(merged.get("shipping_cost", 0)),
        float(merged.get("shipping_cost_discount", 0)),
        float(merged.get("install_cost", 0)),
        float(merged.get("install_cost_discount", 0)),
        float(merged.get("extras_cost", 0)),
    )

    # Recomputed totals are always written, even for a no-op payload.
    for field, val in totals.items():
        set_parts.append(f"{field} = ?")
        params.append(val)

    set_parts.append("updated_at = ?")
    params.append(now)
    params.append(quotation_id)  # WHERE-clause parameter, must stay last

    # set_parts is never empty here (totals/updated_at are always appended),
    # and every fragment comes from a hard-coded list — no injection risk.
    if set_parts:
        await db.execute(
            f"UPDATE crm_quotations SET {', '.join(set_parts)} WHERE id = ?",
            params,
        )

    # Replace items if provided
    if "items" in update_fields:
        await db.execute("DELETE FROM crm_quotation_items WHERE quotation_id = ?", (quotation_id,))
        for i, item in enumerate(items_raw):
            item_id = str(uuid.uuid4())
            await db.execute(
                """INSERT INTO crm_quotation_items
                (id, quotation_id, product_id, description, description_en, description_gr,
                unit_type, unit_cost, discount_percent, quantity, vat_percent, line_total, sort_order)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    item_id, quotation_id, item.get("product_id"), item.get("description"),
                    item.get("description_en"), item.get("description_gr"),
                    item.get("unit_type", "pcs"), item.get("unit_cost", 0),
                    item.get("discount_percent", 0), item.get("quantity", 1),
                    item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
                ),
            )

    await db.commit()

    quotation = await get_quotation(quotation_id)

    if generate_pdf:
        quotation = await _do_generate_and_upload_pdf(quotation)

    return quotation
|
||||||
|
|
||||||
|
|
||||||
|
async def delete_quotation(quotation_id: str) -> None:
    """Delete a quotation and its items; best-effort remove its Nextcloud PDF.

    Raises:
        HTTPException(404): no quotation with this id exists.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT nextcloud_pdf_path FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")

    # Capture the PDF path before the row disappears.
    pdf_path = dict(rows[0]).get("nextcloud_pdf_path")

    # Items first, then the parent row, in one commit.
    await db.execute("DELETE FROM crm_quotation_items WHERE quotation_id = ?", (quotation_id,))
    await db.execute("DELETE FROM crm_quotations WHERE id = ?", (quotation_id,))
    await db.commit()

    # Remove PDF from Nextcloud (best-effort)
    # — DB deletion already succeeded; a failed remote delete only leaves an
    # orphaned file behind, so log and continue.
    if pdf_path:
        try:
            await nextcloud.delete_file(pdf_path)
        except Exception as e:
            logger.warning("Failed to delete PDF from Nextcloud (%s): %s", pdf_path, e)
|
||||||
|
|
||||||
|
|
||||||
|
# ── PDF Generation ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async def _do_generate_and_upload_pdf(quotation: QuotationInDB) -> QuotationInDB:
    """Generate PDF, upload to Nextcloud, update DB record. Returns updated quotation.

    Deliberately best-effort: every stage (customer lookup, rendering, upload)
    logs its error and returns the unmodified quotation instead of raising, so
    a PDF failure never fails the surrounding create/update request.
    """
    try:
        customer = get_customer(quotation.customer_id)
    except Exception as e:
        logger.error("Cannot generate PDF — customer not found: %s", e)
        return quotation

    try:
        pdf_bytes = await _generate_pdf_bytes(quotation, customer)
    except Exception as e:
        logger.error("PDF generation failed for quotation %s: %s", quotation.id, e)
        return quotation

    # Delete old PDF if present (ignore failures — we are about to replace it).
    if quotation.nextcloud_pdf_path:
        try:
            await nextcloud.delete_file(quotation.nextcloud_pdf_path)
        except Exception:
            pass

    try:
        pdf_path, pdf_url = await _upload_pdf(customer, quotation, pdf_bytes)
    except Exception as e:
        logger.error("PDF upload failed for quotation %s: %s", quotation.id, e)
        return quotation

    # Persist paths
    db = await mqtt_db.get_db()
    await db.execute(
        "UPDATE crm_quotations SET nextcloud_pdf_path = ?, nextcloud_pdf_url = ? WHERE id = ?",
        (pdf_path, pdf_url, quotation.id),
    )
    await db.commit()

    # Re-read so the caller sees the stored PDF path/url.
    return await get_quotation(quotation.id)
|
||||||
|
|
||||||
|
|
||||||
|
async def _generate_pdf_bytes(quotation: QuotationInDB, customer) -> bytes:
    """Render Jinja2 template and convert to PDF via WeasyPrint.

    Imports are local so the heavy jinja2/weasyprint dependencies load only
    when a PDF is actually produced.
    """
    from jinja2 import Environment, FileSystemLoader, select_autoescape
    import weasyprint

    env = Environment(
        loader=FileSystemLoader(str(_TEMPLATES_DIR)),
        autoescape=select_autoescape(["html"]),
    )

    def format_money(value):
        # Jinja filter: render a number as a euro amount.
        try:
            f = float(value)
            # Greek-style: dot thousands separator, comma decimal
            # (swap via a placeholder so the two replacements don't collide).
            formatted = f"{f:,.2f}".replace(",", "X").replace(".", ",").replace("X", ".")
            return f"{formatted} €"
        except (TypeError, ValueError):
            return "0,00 €"

    env.filters["format_money"] = format_money

    template = env.get_template("quotation.html")

    html_str = template.render(
        quotation=quotation,
        customer=customer,
        lang=quotation.language,
    )

    # base_url lets relative asset references in the template resolve
    # against the templates directory.
    pdf = weasyprint.HTML(string=html_str, base_url=str(_TEMPLATES_DIR)).write_pdf()
    return pdf
|
||||||
|
|
||||||
|
|
||||||
|
async def _upload_pdf(customer, quotation: QuotationInDB, pdf_bytes: bytes) -> tuple[str, str]:
    """Upload a generated quotation PDF to Nextcloud.

    Stores the file under ``customers/<folder>/quotations/`` with a
    date-stamped name, and returns ``(relative_path, public_url)``.

    Raises: whatever the Nextcloud client raises on folder/upload failure
    (callers treat this as best-effort).
    """
    from crm.service import get_customer_nc_path

    nc_folder = get_customer_nc_path(customer)
    date_str = datetime.utcnow().strftime("%Y-%m-%d")
    filename = f"Quotation-{quotation.quotation_number}-{date_str}.pdf"
    # BUGFIX: the destination path must include the generated filename —
    # previously `filename` was built but never used in the path.
    rel_path = f"customers/{nc_folder}/quotations/{filename}"

    await nextcloud.ensure_folder(f"customers/{nc_folder}/quotations")
    await nextcloud.upload_file(rel_path, pdf_bytes, "application/pdf")

    # Construct a direct WebDAV download URL
    from crm.nextcloud import _full_url
    pdf_url = _full_url(rel_path)

    return rel_path, pdf_url
|
||||||
|
|
||||||
|
|
||||||
|
async def regenerate_pdf(quotation_id: str) -> QuotationInDB:
    """Force a fresh PDF render + upload for an existing quotation (404 if missing)."""
    quotation = await get_quotation(quotation_id)
    return await _do_generate_and_upload_pdf(quotation)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_quotation_pdf_bytes(quotation_id: str) -> bytes:
    """Download the PDF for a quotation from Nextcloud and return raw bytes.

    Legacy quotations store their PDF at ``legacy_pdf_path``; generated
    quotations at ``nextcloud_pdf_path``.

    Raises:
        HTTPException(404): quotation missing, or no PDF path is stored.
    """
    # HTTPException is already imported at module level; the previous
    # function-local re-import was redundant.
    quotation = await get_quotation(quotation_id)
    # For legacy quotations, the PDF is at legacy_pdf_path
    path = quotation.legacy_pdf_path if quotation.is_legacy else quotation.nextcloud_pdf_path
    if not path:
        raise HTTPException(status_code=404, detail="No PDF available for this quotation")
    pdf_bytes, _ = await nextcloud.download_file(path)
    return pdf_bytes
|
||||||
|
|
||||||
|
|
||||||
|
async def upload_legacy_pdf(quotation_id: str, pdf_bytes: bytes, filename: str) -> QuotationInDB:
    """Upload a legacy PDF to Nextcloud and store its path in the quotation record.

    Args:
        quotation_id: id of an existing quotation flagged ``is_legacy``.
        pdf_bytes: raw PDF content.
        filename: name to store the file under in the customer's quotations folder.

    Raises:
        HTTPException(404): quotation does not exist.
        HTTPException(400): quotation is not a legacy quotation.
    """
    quotation = await get_quotation(quotation_id)
    if not quotation.is_legacy:
        raise HTTPException(status_code=400, detail="This quotation is not a legacy quotation")

    from crm.service import get_customer, get_customer_nc_path
    customer = get_customer(quotation.customer_id)
    nc_folder = get_customer_nc_path(customer)

    await nextcloud.ensure_folder(f"customers/{nc_folder}/quotations")
    # BUGFIX: the destination path must include the caller-supplied filename —
    # previously the `filename` parameter was never used in the path.
    rel_path = f"customers/{nc_folder}/quotations/{filename}"
    await nextcloud.upload_file(rel_path, pdf_bytes, "application/pdf")

    db = await mqtt_db.get_db()
    now = datetime.utcnow().isoformat()
    await db.execute(
        "UPDATE crm_quotations SET legacy_pdf_path = ?, updated_at = ? WHERE id = ?",
        (rel_path, now, quotation_id),
    )
    await db.commit()
    return await get_quotation(quotation_id)
|
||||||
93
backend/crm/router.py
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
from fastapi import APIRouter, Depends, Query, UploadFile, File, HTTPException
|
||||||
|
from fastapi.responses import FileResponse
|
||||||
|
from typing import Optional
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from crm.models import ProductCreate, ProductUpdate, ProductInDB, ProductListResponse
|
||||||
|
from crm import service
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/crm/products", tags=["crm-products"])

# On-disk location for product photos: <backend root>/storage/product_images.
# Created eagerly at import time so upload handlers can write immediately.
PHOTO_DIR = os.path.join(os.path.dirname(__file__), "..", "storage", "product_images")
os.makedirs(PHOTO_DIR, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", response_model=ProductListResponse)
def list_products(
    search: Optional[str] = Query(None),
    category: Optional[str] = Query(None),
    active_only: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List products, optionally filtered by search text, category, and active flag."""
    products = service.list_products(search=search, category=category, active_only=active_only)
    return ProductListResponse(products=products, total=len(products))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{product_id}", response_model=ProductInDB)
def get_product(
    product_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch a single product by id."""
    return service.get_product(product_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("", response_model=ProductInDB, status_code=201)
def create_product(
    body: ProductCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a new product from the submitted payload."""
    return service.create_product(body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{product_id}", response_model=ProductInDB)
def update_product(
    product_id: str,
    body: ProductUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Apply a partial update to an existing product."""
    return service.update_product(product_id, body)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{product_id}", status_code=204)
def delete_product(
    product_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a product by id."""
    service.delete_product(product_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{product_id}/photo", response_model=ProductInDB)
async def upload_product_photo(
    product_id: str,
    file: UploadFile = File(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Upload a product photo. Accepts JPG or PNG, stored on disk."""
    # NOTE(review): only the client-declared Content-Type header is checked;
    # the bytes themselves are not validated as an actual image.
    if file.content_type not in ("image/jpeg", "image/png", "image/webp"):
        raise HTTPException(status_code=400, detail="Only JPG, PNG, or WebP images are accepted.")
    ext = {"image/jpeg": "jpg", "image/png": "png", "image/webp": "webp"}.get(file.content_type, "jpg")
    photo_path = os.path.join(PHOTO_DIR, f"{product_id}.{ext}")
    # Remove any old photo files for this product
    # (one photo per product; a format change would otherwise leave stale files).
    for old_ext in ("jpg", "png", "webp"):
        old_path = os.path.join(PHOTO_DIR, f"{product_id}.{old_ext}")
        if os.path.exists(old_path) and old_path != photo_path:
            os.remove(old_path)
    # Stream the upload straight to disk.
    with open(photo_path, "wb") as f:
        shutil.copyfileobj(file.file, f)
    # Store a backend-relative URL so the frontend can render the photo.
    photo_url = f"/crm/products/{product_id}/photo"
    return service.update_product(product_id, ProductUpdate(photo_url=photo_url))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{product_id}/photo")
def get_product_photo(
    product_id: str,
):
    """Serve a product photo from disk."""
    # NOTE(review): unlike the other product routes, this endpoint has no
    # require_permission dependency — presumably so <img> tags can load it
    # without auth headers; confirm this is intentional.
    for ext in ("jpg", "png", "webp"):
        photo_path = os.path.join(PHOTO_DIR, f"{product_id}.{ext}")
        if os.path.exists(photo_path):
            return FileResponse(photo_path)
    raise HTTPException(status_code=404, detail="No photo found for this product.")
|
||||||
1248
backend/crm/service.py
Normal file
125
backend/crm/thumbnails.py
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
"""
Thumbnail generation for uploaded media files.

Supports:
- Images (via Pillow): JPEG thumbnail bounded by THUMB_SIZE (220×220)
- Videos (via ffmpeg subprocess): extract first frame as JPEG
- PDFs (via pdf2image + Poppler): render first page as JPEG

Returns None if the type is unsupported or if generation fails.
"""
import io
import logging
import subprocess
from pathlib import Path

logger = logging.getLogger(__name__)

THUMB_SIZE = (220, 220)  # small enough for gallery tiles; keeps files ~4-6 KB
|
||||||
|
|
||||||
|
|
||||||
|
def _thumb_from_image(content: bytes) -> bytes | None:
    """Build a JPEG thumbnail from raw image bytes.

    Returns the encoded JPEG, or None if Pillow is missing or the bytes
    cannot be decoded/encoded (any exception is logged and swallowed).
    """
    try:
        from PIL import Image, ImageOps
        img = Image.open(io.BytesIO(content))
        img = ImageOps.exif_transpose(img)  # honour EXIF Orientation tag before resizing
        img = img.convert("RGB")  # JPEG has no alpha/palette — normalise first
        img.thumbnail(THUMB_SIZE, Image.LANCZOS)  # in-place, preserves aspect ratio
        out = io.BytesIO()
        # quality=65 + optimize=True + progressive encoding → ~4-6 KB for typical photos
        img.save(out, format="JPEG", quality=65, optimize=True, progressive=True)
        return out.getvalue()
    except Exception as e:
        logger.warning("Image thumbnail failed: %s", e)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def _thumb_from_video(content: bytes) -> bytes | None:
    """
    Extract the first frame of a video as a JPEG thumbnail.

    We write the video to a temp file instead of piping it to ffmpeg because
    most video containers (MP4, MOV, MKV …) store their index (moov atom) at
    an arbitrary offset and ffmpeg cannot seek on a pipe — causing rc≠0 with
    "moov atom not found" or similar errors when stdin is used.

    Returns JPEG bytes, or None if ffmpeg is missing, times out, or fails.
    """
    import tempfile
    import os
    try:
        # Write to a temp file so ffmpeg can seek freely
        with tempfile.NamedTemporaryFile(suffix=".video", delete=False) as tmp_in:
            tmp_in.write(content)
            tmp_in_path = tmp_in.name

        # Pre-create the output path; ffmpeg overwrites it (-y below).
        with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tmp_out:
            tmp_out_path = tmp_out.name

        try:
            result = subprocess.run(
                [
                    "ffmpeg", "-y",
                    "-i", tmp_in_path,
                    "-vframes", "1",  # grab a single frame only
                    "-vf", f"scale={THUMB_SIZE[0]}:-2",  # fixed width, even auto height
                    "-q:v", "4",  # JPEG quality 1-31 (lower = better); 4 ≈ ~80% quality
                    tmp_out_path,
                ],
                capture_output=True,
                timeout=60,  # hard cap so a corrupt file can't hang the worker
            )
            if result.returncode == 0 and os.path.getsize(tmp_out_path) > 0:
                with open(tmp_out_path, "rb") as f:
                    return f.read()
            logger.warning(
                "ffmpeg video thumb failed (rc=%s): %s",
                result.returncode,
                # Keep only the tail of stderr — ffmpeg banners are long.
                result.stderr[-400:].decode(errors="replace") if result.stderr else "",
            )
            return None
        finally:
            # Always delete both temp files, even on timeout/exception.
            os.unlink(tmp_in_path)
            try:
                os.unlink(tmp_out_path)
            except FileNotFoundError:
                pass
    except FileNotFoundError:
        # Raised by subprocess.run when the ffmpeg binary is absent.
        logger.warning("ffmpeg not found — video thumbnails unavailable")
        return None
    except Exception as e:
        logger.warning("Video thumbnail failed: %s", e)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def _thumb_from_pdf(content: bytes) -> bytes | None:
    """Render the first page of a PDF as a JPEG thumbnail.

    Returns encoded JPEG bytes, or None when pdf2image/Poppler are not
    installed or rendering fails (failures are logged, never raised).
    """
    try:
        from pdf2image import convert_from_bytes

        rendered = convert_from_bytes(content, first_page=1, last_page=1, size=THUMB_SIZE)
        if not rendered:
            return None
        buf = io.BytesIO()
        first_page = rendered[0]
        first_page.save(buf, format="JPEG", quality=55, optimize=True, progressive=True)
        return buf.getvalue()
    except ImportError:
        logger.warning("pdf2image not installed — PDF thumbnails unavailable")
        return None
    except Exception as e:
        logger.warning("PDF thumbnail failed: %s", e)
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def generate_thumbnail(content: bytes, mime_type: str, filename: str) -> bytes | None:
    """
    Generate a small JPEG thumbnail for the given file content.

    Dispatches on the (lowercased) MIME type, falling back to the filename
    extension for PDFs whose MIME type was not reported.
    Returns JPEG bytes or None if unsupported / generation fails.
    """
    mime = (mime_type or "").lower()
    name = (filename or "").lower()

    if mime.startswith("image/"):
        return _thumb_from_image(content)
    if mime.startswith("video/"):
        return _thumb_from_video(content)
    if mime == "application/pdf" or name.endswith(".pdf"):
        return _thumb_from_pdf(content)

    # Anything else (archives, office docs, …) gets no thumbnail.
    return None
|
||||||
39
backend/database/__init__.py
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# Re-export the public database API so callers can simply `import database`
# instead of reaching into database.core.
from database.core import (
    init_db,
    close_db,
    get_db,
    purge_loop,
    purge_old_data,
    insert_log,
    insert_heartbeat,
    insert_command,
    update_command_response,
    get_logs,
    get_heartbeats,
    get_commands,
    get_latest_heartbeats,
    get_pending_command,
    upsert_alert,
    delete_alert,
    get_alerts,
)

# Keep this list in sync with the import above.
__all__ = [
    "init_db",
    "close_db",
    "get_db",
    "purge_loop",
    "purge_old_data",
    "insert_log",
    "insert_heartbeat",
    "insert_command",
    "update_command_response",
    "get_logs",
    "get_heartbeats",
    "get_commands",
    "get_latest_heartbeats",
    "get_pending_command",
    "upsert_alert",
    "delete_alert",
    "get_alerts",
]
|
||||||
454
backend/database/core.py
Normal file
@@ -0,0 +1,454 @@
|
|||||||
|
import aiosqlite
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
logger = logging.getLogger("database")
|
||||||
|
|
||||||
|
_db: aiosqlite.Connection | None = None
|
||||||
|
|
||||||
|
# DDL executed in order on every startup. All statements are idempotent
# (IF NOT EXISTS), so re-running is safe; column additions for existing
# tables live in the _migrations list inside init_db() instead.
SCHEMA_STATEMENTS = [
    # Raw log lines forwarded by devices over MQTT.
    """CREATE TABLE IF NOT EXISTS device_logs (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        device_serial TEXT NOT NULL,
        level TEXT NOT NULL,
        message TEXT NOT NULL,
        device_timestamp INTEGER,
        received_at TEXT NOT NULL DEFAULT (datetime('now'))
    )""",
    # Periodic device check-ins (identity, network info, uptime).
    """CREATE TABLE IF NOT EXISTS heartbeats (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        device_serial TEXT NOT NULL,
        device_id TEXT,
        firmware_version TEXT,
        ip_address TEXT,
        gateway TEXT,
        uptime_ms INTEGER,
        uptime_display TEXT,
        received_at TEXT NOT NULL DEFAULT (datetime('now'))
    )""",
    # Commands sent to devices plus their (eventual) responses.
    """CREATE TABLE IF NOT EXISTS commands (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        device_serial TEXT NOT NULL,
        command_name TEXT NOT NULL,
        command_payload TEXT,
        status TEXT NOT NULL DEFAULT 'pending',
        response_payload TEXT,
        sent_at TEXT NOT NULL DEFAULT (datetime('now')),
        responded_at TEXT
    )""",
    "CREATE INDEX IF NOT EXISTS idx_logs_serial_time ON device_logs(device_serial, received_at)",
    "CREATE INDEX IF NOT EXISTS idx_logs_level ON device_logs(level)",
    "CREATE INDEX IF NOT EXISTS idx_heartbeats_serial_time ON heartbeats(device_serial, received_at)",
    "CREATE INDEX IF NOT EXISTS idx_commands_serial_time ON commands(device_serial, sent_at)",
    "CREATE INDEX IF NOT EXISTS idx_commands_status ON commands(status)",
    # Melody drafts table
    """CREATE TABLE IF NOT EXISTS melody_drafts (
        id TEXT PRIMARY KEY,
        status TEXT NOT NULL DEFAULT 'draft',
        data TEXT NOT NULL,
        created_at TEXT NOT NULL DEFAULT (datetime('now')),
        updated_at TEXT NOT NULL DEFAULT (datetime('now'))
    )""",
    "CREATE INDEX IF NOT EXISTS idx_melody_drafts_status ON melody_drafts(status)",
    # Built melodies table (local melody builder)
    """CREATE TABLE IF NOT EXISTS built_melodies (
        id TEXT PRIMARY KEY,
        name TEXT NOT NULL,
        pid TEXT NOT NULL,
        steps TEXT NOT NULL,
        binary_path TEXT,
        progmem_code TEXT,
        assigned_melody_ids TEXT NOT NULL DEFAULT '[]',
        created_at TEXT NOT NULL DEFAULT (datetime('now')),
        updated_at TEXT NOT NULL DEFAULT (datetime('now'))
    )""",
    # Manufacturing audit log
    """CREATE TABLE IF NOT EXISTS mfg_audit_log (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        timestamp TEXT NOT NULL DEFAULT (datetime('now')),
        admin_user TEXT NOT NULL,
        action TEXT NOT NULL,
        serial_number TEXT,
        detail TEXT
    )""",
    "CREATE INDEX IF NOT EXISTS idx_mfg_audit_time ON mfg_audit_log(timestamp)",
    "CREATE INDEX IF NOT EXISTS idx_mfg_audit_action ON mfg_audit_log(action)",
    # Active device alerts (current state, not history)
    """CREATE TABLE IF NOT EXISTS device_alerts (
        device_serial TEXT NOT NULL,
        subsystem TEXT NOT NULL,
        state TEXT NOT NULL,
        message TEXT,
        updated_at TEXT NOT NULL DEFAULT (datetime('now')),
        PRIMARY KEY (device_serial, subsystem)
    )""",
    "CREATE INDEX IF NOT EXISTS idx_device_alerts_serial ON device_alerts(device_serial)",
    # CRM communications log
    """CREATE TABLE IF NOT EXISTS crm_comms_log (
        id TEXT PRIMARY KEY,
        customer_id TEXT,
        type TEXT NOT NULL,
        mail_account TEXT,
        direction TEXT NOT NULL,
        subject TEXT,
        body TEXT,
        body_html TEXT,
        attachments TEXT NOT NULL DEFAULT '[]',
        ext_message_id TEXT,
        from_addr TEXT,
        to_addrs TEXT,
        logged_by TEXT,
        occurred_at TEXT NOT NULL,
        created_at TEXT NOT NULL
    )""",
    "CREATE INDEX IF NOT EXISTS idx_crm_comms_customer ON crm_comms_log(customer_id, occurred_at)",
    # CRM media references
    """CREATE TABLE IF NOT EXISTS crm_media (
        id TEXT PRIMARY KEY,
        customer_id TEXT,
        order_id TEXT,
        filename TEXT NOT NULL,
        nextcloud_path TEXT NOT NULL,
        mime_type TEXT,
        direction TEXT,
        tags TEXT NOT NULL DEFAULT '[]',
        uploaded_by TEXT,
        created_at TEXT NOT NULL
    )""",
    "CREATE INDEX IF NOT EXISTS idx_crm_media_customer ON crm_media(customer_id)",
    "CREATE INDEX IF NOT EXISTS idx_crm_media_order ON crm_media(order_id)",
    # CRM sync state (last email sync timestamp, etc.)
    """CREATE TABLE IF NOT EXISTS crm_sync_state (
        key TEXT PRIMARY KEY,
        value TEXT
    )""",
    # CRM Quotations
    """CREATE TABLE IF NOT EXISTS crm_quotations (
        id TEXT PRIMARY KEY,
        quotation_number TEXT UNIQUE NOT NULL,
        title TEXT,
        subtitle TEXT,
        customer_id TEXT NOT NULL,
        language TEXT NOT NULL DEFAULT 'en',
        status TEXT NOT NULL DEFAULT 'draft',
        order_type TEXT,
        shipping_method TEXT,
        estimated_shipping_date TEXT,
        global_discount_label TEXT,
        global_discount_percent REAL NOT NULL DEFAULT 0,
        vat_percent REAL NOT NULL DEFAULT 24,
        shipping_cost REAL NOT NULL DEFAULT 0,
        shipping_cost_discount REAL NOT NULL DEFAULT 0,
        install_cost REAL NOT NULL DEFAULT 0,
        install_cost_discount REAL NOT NULL DEFAULT 0,
        extras_label TEXT,
        extras_cost REAL NOT NULL DEFAULT 0,
        comments TEXT NOT NULL DEFAULT '[]',
        subtotal_before_discount REAL NOT NULL DEFAULT 0,
        global_discount_amount REAL NOT NULL DEFAULT 0,
        new_subtotal REAL NOT NULL DEFAULT 0,
        vat_amount REAL NOT NULL DEFAULT 0,
        final_total REAL NOT NULL DEFAULT 0,
        nextcloud_pdf_path TEXT,
        nextcloud_pdf_url TEXT,
        created_at TEXT NOT NULL,
        updated_at TEXT NOT NULL
    )""",
    # Quotation line items, ordered via sort_order.
    """CREATE TABLE IF NOT EXISTS crm_quotation_items (
        id TEXT PRIMARY KEY,
        quotation_id TEXT NOT NULL,
        product_id TEXT,
        description TEXT,
        description_en TEXT,
        description_gr TEXT,
        unit_type TEXT NOT NULL DEFAULT 'pcs',
        unit_cost REAL NOT NULL DEFAULT 0,
        discount_percent REAL NOT NULL DEFAULT 0,
        quantity REAL NOT NULL DEFAULT 1,
        line_total REAL NOT NULL DEFAULT 0,
        sort_order INTEGER NOT NULL DEFAULT 0,
        FOREIGN KEY (quotation_id) REFERENCES crm_quotations(id)
    )""",
    "CREATE INDEX IF NOT EXISTS idx_crm_quotations_customer ON crm_quotations(customer_id)",
    "CREATE INDEX IF NOT EXISTS idx_crm_quotation_items_quotation ON crm_quotation_items(quotation_id, sort_order)",
]
|
||||||
|
|
||||||
|
|
||||||
|
async def init_db():
    """Open (and create if needed) the SQLite database and apply migrations.

    Order matters: base schema first, then additive column migrations, then
    the one structural rebuild of crm_comms_log. Stores the connection in
    the module-level _db singleton used by get_db().
    """
    global _db
    # Ensure the parent directory exists before SQLite tries to create the file.
    os.makedirs(os.path.dirname(os.path.abspath(settings.sqlite_db_path)), exist_ok=True)
    _db = await aiosqlite.connect(settings.sqlite_db_path)
    _db.row_factory = aiosqlite.Row
    for stmt in SCHEMA_STATEMENTS:
        await _db.execute(stmt)
    await _db.commit()
    # Migrations: add columns that may not exist in older DBs
    _migrations = [
        "ALTER TABLE crm_comms_log ADD COLUMN body_html TEXT",
        "ALTER TABLE crm_comms_log ADD COLUMN mail_account TEXT",
        "ALTER TABLE crm_comms_log ADD COLUMN from_addr TEXT",
        "ALTER TABLE crm_comms_log ADD COLUMN to_addrs TEXT",
        "ALTER TABLE crm_comms_log ADD COLUMN is_important INTEGER NOT NULL DEFAULT 0",
        "ALTER TABLE crm_comms_log ADD COLUMN is_read INTEGER NOT NULL DEFAULT 0",
        "ALTER TABLE crm_quotation_items ADD COLUMN vat_percent REAL NOT NULL DEFAULT 24",
        "ALTER TABLE crm_quotations ADD COLUMN quick_notes TEXT NOT NULL DEFAULT '{}'",
        "ALTER TABLE crm_quotations ADD COLUMN client_org TEXT",
        "ALTER TABLE crm_quotations ADD COLUMN client_name TEXT",
        "ALTER TABLE crm_quotations ADD COLUMN client_location TEXT",
        "ALTER TABLE crm_quotations ADD COLUMN client_phone TEXT",
        "ALTER TABLE crm_quotations ADD COLUMN client_email TEXT",
        "ALTER TABLE crm_quotations ADD COLUMN is_legacy INTEGER NOT NULL DEFAULT 0",
        "ALTER TABLE crm_quotations ADD COLUMN legacy_date TEXT",
        "ALTER TABLE crm_quotations ADD COLUMN legacy_pdf_path TEXT",
        "ALTER TABLE crm_media ADD COLUMN thumbnail_path TEXT",
        "ALTER TABLE crm_quotation_items ADD COLUMN description_en TEXT",
        "ALTER TABLE crm_quotation_items ADD COLUMN description_gr TEXT",
        "ALTER TABLE built_melodies ADD COLUMN is_builtin INTEGER NOT NULL DEFAULT 0",
    ]
    for m in _migrations:
        try:
            await _db.execute(m)
            await _db.commit()
        except Exception:
            # NOTE(review): this intentionally treats "duplicate column" as
            # success, but it also hides any other ALTER failure — consider
            # narrowing to sqlite3.OperationalError / checking the message.
            pass  # column already exists

    # Migration: drop NOT NULL on crm_comms_log.customer_id if it exists.
    # SQLite doesn't support ALTER COLUMN, so we check via table_info and
    # rebuild the table if needed.
    rows = await _db.execute_fetchall("PRAGMA table_info(crm_comms_log)")
    for row in rows:
        # row: (cid, name, type, notnull, dflt_value, pk)
        if row[1] == "customer_id" and row[3] == 1:  # notnull=1
            logger.info("Migrating crm_comms_log: removing NOT NULL from customer_id")
            await _db.execute("ALTER TABLE crm_comms_log RENAME TO crm_comms_log_old")
            await _db.execute("""CREATE TABLE crm_comms_log (
                id TEXT PRIMARY KEY,
                customer_id TEXT,
                type TEXT NOT NULL,
                mail_account TEXT,
                direction TEXT NOT NULL,
                subject TEXT,
                body TEXT,
                body_html TEXT,
                attachments TEXT NOT NULL DEFAULT '[]',
                ext_message_id TEXT,
                from_addr TEXT,
                to_addrs TEXT,
                logged_by TEXT,
                occurred_at TEXT NOT NULL,
                created_at TEXT NOT NULL
            )""")
            # Positional SELECT: the NULL fills the new mail_account column,
            # which old rows predate. Column order must match the CREATE above.
            await _db.execute("""INSERT INTO crm_comms_log
                SELECT id, customer_id, type, NULL, direction, subject, body, body_html,
                       attachments, ext_message_id, from_addr, to_addrs, logged_by,
                       occurred_at, created_at
                FROM crm_comms_log_old""")
            await _db.execute("DROP TABLE crm_comms_log_old")
            # RENAME dropped the index with the old table; recreate it.
            await _db.execute("CREATE INDEX IF NOT EXISTS idx_crm_comms_customer ON crm_comms_log(customer_id, occurred_at)")
            await _db.commit()
            logger.info("Migration complete: crm_comms_log.customer_id is now nullable")
            break
    logger.info(f"SQLite database initialized at {settings.sqlite_db_path}")
|
||||||
|
|
||||||
|
|
||||||
|
async def close_db():
    """Close the shared connection and clear the singleton (no-op if unopened)."""
    global _db
    if _db:
        await _db.close()
    _db = None
|
||||||
|
|
||||||
|
|
||||||
|
async def get_db() -> aiosqlite.Connection:
    """Return the shared connection, lazily initializing the DB on first use."""
    if _db is None:
        await init_db()
    return _db
|
||||||
|
|
||||||
|
|
||||||
|
# --- Insert Operations ---
|
||||||
|
|
||||||
|
async def insert_log(device_serial: str, level: str, message: str,
                     device_timestamp: int | None = None):
    """Persist one device log line.

    Args:
        device_serial: Serial of the reporting device.
        level: Log level string as sent by the device.
        message: Log text.
        device_timestamp: Optional device-side timestamp (epoch units as sent).

    Returns:
        The new row id.
    """
    conn = await get_db()
    cur = await conn.execute(
        "INSERT INTO device_logs (device_serial, level, message, device_timestamp) VALUES (?, ?, ?, ?)",
        (device_serial, level, message, device_timestamp),
    )
    await conn.commit()
    return cur.lastrowid
|
||||||
|
|
||||||
|
|
||||||
|
async def insert_heartbeat(device_serial: str, device_id: str,
                           firmware_version: str, ip_address: str,
                           gateway: str, uptime_ms: int, uptime_display: str):
    """Persist one device heartbeat row; returns the new row id."""
    conn = await get_db()
    values = (device_serial, device_id, firmware_version, ip_address,
              gateway, uptime_ms, uptime_display)
    cur = await conn.execute(
        """INSERT INTO heartbeats
           (device_serial, device_id, firmware_version, ip_address, gateway, uptime_ms, uptime_display)
           VALUES (?, ?, ?, ?, ?, ?, ?)""",
        values,
    )
    await conn.commit()
    return cur.lastrowid
|
||||||
|
|
||||||
|
|
||||||
|
async def insert_command(device_serial: str, command_name: str,
                         command_payload: dict) -> int:
    """Queue a command for a device (status starts as 'pending').

    The payload dict is stored as JSON text. Returns the new row id, which
    callers later pass to update_command_response().
    """
    conn = await get_db()
    payload_json = json.dumps(command_payload)
    cur = await conn.execute(
        "INSERT INTO commands (device_serial, command_name, command_payload) VALUES (?, ?, ?)",
        (device_serial, command_name, payload_json),
    )
    await conn.commit()
    return cur.lastrowid
|
||||||
|
|
||||||
|
|
||||||
|
async def update_command_response(command_id: int, status: str,
                                  response_payload: dict | None = None):
    """Record a device's response for a previously-sent command.

    Args:
        command_id: Row id returned by insert_command().
        status: New status string (e.g. 'ok', 'error').
        response_payload: Optional response body, stored as JSON text.
    """
    db = await get_db()
    # Serialize whenever a payload was supplied: an empty dict is a valid
    # response and must be stored as '{}'. The previous truthiness check
    # (`if response_payload`) silently mapped {} to NULL.
    payload_json = json.dumps(response_payload) if response_payload is not None else None
    await db.execute(
        """UPDATE commands SET status = ?, response_payload = ?,
           responded_at = datetime('now') WHERE id = ?""",
        (status, payload_json, command_id)
    )
    await db.commit()
|
||||||
|
|
||||||
|
|
||||||
|
# --- Query Operations ---
|
||||||
|
|
||||||
|
async def get_logs(device_serial: str, level: str | None = None,
                   search: str | None = None,
                   limit: int = 100, offset: int = 0) -> tuple[list, int]:
    """Fetch a page of device logs plus the total matching count.

    Optional filters: exact `level` match and substring `search` against the
    message text. Results are newest-first.
    """
    conn = await get_db()

    # Assemble the WHERE clause from fixed fragments only; every user-supplied
    # value travels through bound parameters, never string interpolation.
    filters = ["device_serial = ?"]
    bound: list = [device_serial]
    if level:
        filters.append("level = ?")
        bound.append(level)
    if search:
        filters.append("message LIKE ?")
        bound.append(f"%{search}%")
    predicate = " AND ".join(filters)

    count_rows = await conn.execute_fetchall(
        f"SELECT COUNT(*) as cnt FROM device_logs WHERE {predicate}", bound
    )
    total = count_rows[0][0]

    page = await conn.execute_fetchall(
        f"SELECT * FROM device_logs WHERE {predicate} ORDER BY received_at DESC LIMIT ? OFFSET ?",
        bound + [limit, offset]
    )
    return [dict(r) for r in page], total
|
||||||
|
|
||||||
|
|
||||||
|
async def get_heartbeats(device_serial: str, limit: int = 100,
                         offset: int = 0) -> tuple[list, int]:
    """Return (page_of_heartbeats, total_count) for one device, newest-first."""
    conn = await get_db()
    counted = await conn.execute_fetchall(
        "SELECT COUNT(*) FROM heartbeats WHERE device_serial = ?", (device_serial,)
    )
    total = counted[0][0]
    page = await conn.execute_fetchall(
        "SELECT * FROM heartbeats WHERE device_serial = ? ORDER BY received_at DESC LIMIT ? OFFSET ?",
        (device_serial, limit, offset)
    )
    return [dict(record) for record in page], total
|
||||||
|
|
||||||
|
|
||||||
|
async def get_commands(device_serial: str, limit: int = 100,
                       offset: int = 0) -> tuple[list, int]:
    """Return (page_of_commands, total_count) for one device, newest-first."""
    conn = await get_db()
    counted = await conn.execute_fetchall(
        "SELECT COUNT(*) FROM commands WHERE device_serial = ?", (device_serial,)
    )
    total = counted[0][0]
    page = await conn.execute_fetchall(
        "SELECT * FROM commands WHERE device_serial = ? ORDER BY sent_at DESC LIMIT ? OFFSET ?",
        (device_serial, limit, offset)
    )
    return [dict(record) for record in page], total
|
||||||
|
|
||||||
|
|
||||||
|
async def get_latest_heartbeats() -> list:
    """Return the most recent heartbeat row for every known device.

    NOTE(review): received_at has second granularity; if a device somehow
    logged two heartbeats in the same second, both rows would match the join
    and be returned — confirm callers tolerate that.
    """
    db = await get_db()
    rows = await db.execute_fetchall("""
        SELECT h.* FROM heartbeats h
        INNER JOIN (
            SELECT device_serial, MAX(received_at) as max_time
            FROM heartbeats GROUP BY device_serial
        ) latest ON h.device_serial = latest.device_serial
                AND h.received_at = latest.max_time
    """)
    return [dict(r) for r in rows]
|
||||||
|
|
||||||
|
|
||||||
|
async def get_pending_command(device_serial: str) -> dict | None:
    """Return one pending command for the device, or None.

    NOTE(review): ORDER BY sent_at DESC returns the *newest* pending command;
    if a device has several queued, older ones are skipped (LIFO rather than
    FIFO) — confirm this is the intended dispatch order.
    """
    db = await get_db()
    rows = await db.execute_fetchall(
        """SELECT * FROM commands WHERE device_serial = ? AND status = 'pending'
           ORDER BY sent_at DESC LIMIT 1""",
        (device_serial,)
    )
    return dict(rows[0]) if rows else None
|
||||||
|
|
||||||
|
|
||||||
|
# --- Cleanup ---
|
||||||
|
|
||||||
|
async def purge_old_data(retention_days: int | None = None):
    """Delete MQTT telemetry (logs, heartbeats, commands) older than the window.

    Args:
        retention_days: Override for settings.mqtt_data_retention_days
            (falsy values fall back to the configured default).

    The cutoff is formatted as 'YYYY-MM-DD HH:MM:SS' in UTC so the TEXT
    comparison matches rows stamped by SQLite's datetime('now'), which emits
    the same format. The previous isoformat() cutoff contained 'T' and a
    '+00:00' suffix, making lexicographic comparison wrong around the
    boundary day.
    """
    days = retention_days or settings.mqtt_data_retention_days
    cutoff = (datetime.now(timezone.utc) - timedelta(days=days)).strftime("%Y-%m-%d %H:%M:%S")
    db = await get_db()
    await db.execute("DELETE FROM device_logs WHERE received_at < ?", (cutoff,))
    await db.execute("DELETE FROM heartbeats WHERE received_at < ?", (cutoff,))
    await db.execute("DELETE FROM commands WHERE sent_at < ?", (cutoff,))
    await db.commit()
    logger.info(f"Purged MQTT data older than {days} days")
|
||||||
|
|
||||||
|
|
||||||
|
async def purge_loop():
    """Background task: purge old telemetry once per day, forever.

    Sleeps first so a freshly started server does not purge immediately.
    Failures are logged and the loop keeps running.
    """
    while True:
        await asyncio.sleep(86400)  # 24 h between purge passes
        try:
            await purge_old_data()
        except Exception as e:
            logger.error(f"Purge failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# --- Device Alerts ---
|
||||||
|
|
||||||
|
async def upsert_alert(device_serial: str, subsystem: str, state: str,
                       message: str | None = None):
    """Create or refresh the active alert for one (device, subsystem) pair.

    An existing row for the same key is overwritten with the new state,
    message, and timestamp — the table tracks current status, not history.
    """
    conn = await get_db()
    record = (device_serial, subsystem, state, message)
    await conn.execute(
        """INSERT INTO device_alerts (device_serial, subsystem, state, message, updated_at)
           VALUES (?, ?, ?, ?, datetime('now'))
           ON CONFLICT(device_serial, subsystem)
           DO UPDATE SET state=excluded.state, message=excluded.message,
                         updated_at=excluded.updated_at""",
        record,
    )
    await conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
async def delete_alert(device_serial: str, subsystem: str):
    """Clear the active alert for one (device, subsystem) pair, if any."""
    conn = await get_db()
    await conn.execute(
        "DELETE FROM device_alerts WHERE device_serial = ? AND subsystem = ?",
        (device_serial, subsystem),
    )
    await conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_alerts(device_serial: str) -> list:
    """Return all active alerts for a device as dicts, most recent first."""
    conn = await get_db()
    matches = await conn.execute_fetchall(
        "SELECT * FROM device_alerts WHERE device_serial = ? ORDER BY updated_at DESC",
        (device_serial,),
    )
    return [dict(row) for row in matches]
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from typing import List, Optional
|
from typing import Any, Dict, List, Optional
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
@@ -31,11 +31,11 @@ class DeviceTiers(str, Enum):
|
|||||||
class DeviceNetworkSettings(BaseModel):
|
class DeviceNetworkSettings(BaseModel):
|
||||||
hostname: str = ""
|
hostname: str = ""
|
||||||
useStaticIP: bool = False
|
useStaticIP: bool = False
|
||||||
ipAddress: List[str] = []
|
ipAddress: Any = []
|
||||||
gateway: List[str] = []
|
gateway: Any = []
|
||||||
subnet: List[str] = []
|
subnet: Any = []
|
||||||
dns1: List[str] = []
|
dns1: Any = []
|
||||||
dns2: List[str] = []
|
dns2: Any = []
|
||||||
|
|
||||||
|
|
||||||
class DeviceClockSettings(BaseModel):
|
class DeviceClockSettings(BaseModel):
|
||||||
@@ -119,13 +119,19 @@ class DeviceCreate(BaseModel):
|
|||||||
device_subscription: DeviceSubInformation = DeviceSubInformation()
|
device_subscription: DeviceSubInformation = DeviceSubInformation()
|
||||||
device_stats: DeviceStatistics = DeviceStatistics()
|
device_stats: DeviceStatistics = DeviceStatistics()
|
||||||
events_on: bool = False
|
events_on: bool = False
|
||||||
device_location_coordinates: str = ""
|
device_location_coordinates: Any = None # GeoPoint dict {lat, lng} or legacy str
|
||||||
device_melodies_all: List[MelodyMainItem] = []
|
device_melodies_all: List[MelodyMainItem] = []
|
||||||
device_melodies_favorites: List[str] = []
|
device_melodies_favorites: List[str] = []
|
||||||
user_list: List[str] = []
|
user_list: List[str] = []
|
||||||
websocket_url: str = ""
|
websocket_url: str = ""
|
||||||
churchAssistantURL: str = ""
|
churchAssistantURL: str = ""
|
||||||
staffNotes: str = ""
|
staffNotes: str = ""
|
||||||
|
hw_family: str = ""
|
||||||
|
hw_revision: str = ""
|
||||||
|
tags: List[str] = []
|
||||||
|
serial_number: str = ""
|
||||||
|
customer_id: str = ""
|
||||||
|
mfg_status: str = ""
|
||||||
|
|
||||||
|
|
||||||
class DeviceUpdate(BaseModel):
|
class DeviceUpdate(BaseModel):
|
||||||
@@ -133,21 +139,28 @@ class DeviceUpdate(BaseModel):
|
|||||||
device_photo: Optional[str] = None
|
device_photo: Optional[str] = None
|
||||||
device_location: Optional[str] = None
|
device_location: Optional[str] = None
|
||||||
is_Online: Optional[bool] = None
|
is_Online: Optional[bool] = None
|
||||||
device_attributes: Optional[DeviceAttributes] = None
|
# Use raw dicts so only the fields actually sent are present — no Pydantic defaults
|
||||||
device_subscription: Optional[DeviceSubInformation] = None
|
device_attributes: Optional[Dict[str, Any]] = None
|
||||||
device_stats: Optional[DeviceStatistics] = None
|
device_subscription: Optional[Dict[str, Any]] = None
|
||||||
|
device_stats: Optional[Dict[str, Any]] = None
|
||||||
events_on: Optional[bool] = None
|
events_on: Optional[bool] = None
|
||||||
device_location_coordinates: Optional[str] = None
|
device_location_coordinates: Optional[Any] = None # dict {lat, lng} or legacy str
|
||||||
device_melodies_all: Optional[List[MelodyMainItem]] = None
|
device_melodies_all: Optional[List[MelodyMainItem]] = None
|
||||||
device_melodies_favorites: Optional[List[str]] = None
|
device_melodies_favorites: Optional[List[str]] = None
|
||||||
user_list: Optional[List[str]] = None
|
user_list: Optional[List[str]] = None
|
||||||
websocket_url: Optional[str] = None
|
websocket_url: Optional[str] = None
|
||||||
churchAssistantURL: Optional[str] = None
|
churchAssistantURL: Optional[str] = None
|
||||||
staffNotes: Optional[str] = None
|
staffNotes: Optional[str] = None
|
||||||
|
hw_family: Optional[str] = None
|
||||||
|
hw_revision: Optional[str] = None
|
||||||
|
tags: Optional[List[str]] = None
|
||||||
|
customer_id: Optional[str] = None
|
||||||
|
mfg_status: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
class DeviceInDB(DeviceCreate):
|
class DeviceInDB(DeviceCreate):
|
||||||
id: str
|
id: str
|
||||||
|
# Legacy field — kept for backwards compat; new docs use serial_number
|
||||||
device_id: str = ""
|
device_id: str = ""
|
||||||
|
|
||||||
|
|
||||||
@@ -156,6 +169,15 @@ class DeviceListResponse(BaseModel):
|
|||||||
total: int
|
total: int
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceNoteCreate(BaseModel):
|
||||||
|
content: str
|
||||||
|
created_by: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceNoteUpdate(BaseModel):
|
||||||
|
content: str
|
||||||
|
|
||||||
|
|
||||||
class DeviceUserInfo(BaseModel):
|
class DeviceUserInfo(BaseModel):
|
||||||
"""User info resolved from device_users sub-collection or user_list."""
|
"""User info resolved from device_users sub-collection or user_list."""
|
||||||
user_id: str = ""
|
user_id: str = ""
|
||||||
|
|||||||
@@ -1,15 +1,25 @@
|
|||||||
from fastapi import APIRouter, Depends, Query
|
import uuid
|
||||||
from typing import Optional
|
from datetime import datetime
|
||||||
|
from fastapi import APIRouter, Depends, Query, HTTPException
|
||||||
|
from typing import Optional, List
|
||||||
|
from pydantic import BaseModel
|
||||||
from auth.models import TokenPayload
|
from auth.models import TokenPayload
|
||||||
from auth.dependencies import require_permission
|
from auth.dependencies import require_permission
|
||||||
from devices.models import (
|
from devices.models import (
|
||||||
DeviceCreate, DeviceUpdate, DeviceInDB, DeviceListResponse,
|
DeviceCreate, DeviceUpdate, DeviceInDB, DeviceListResponse,
|
||||||
DeviceUsersResponse, DeviceUserInfo,
|
DeviceUsersResponse, DeviceUserInfo,
|
||||||
|
DeviceNoteCreate, DeviceNoteUpdate,
|
||||||
)
|
)
|
||||||
from devices import service
|
from devices import service
|
||||||
|
import database as mqtt_db
|
||||||
|
from mqtt.models import DeviceAlertEntry, DeviceAlertsResponse
|
||||||
|
from shared.firebase import get_db as get_firestore
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/devices", tags=["devices"])
|
router = APIRouter(prefix="/api/devices", tags=["devices"])
|
||||||
|
|
||||||
|
NOTES_COLLECTION = "notes"
|
||||||
|
CRM_COLLECTION = "crm_customers"
|
||||||
|
|
||||||
|
|
||||||
@router.get("", response_model=DeviceListResponse)
|
@router.get("", response_model=DeviceListResponse)
|
||||||
async def list_devices(
|
async def list_devices(
|
||||||
@@ -67,3 +77,385 @@ async def delete_device(
|
|||||||
_user: TokenPayload = Depends(require_permission("devices", "delete")),
|
_user: TokenPayload = Depends(require_permission("devices", "delete")),
|
||||||
):
|
):
|
||||||
service.delete_device(device_id)
|
service.delete_device(device_id)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{device_id}/alerts", response_model=DeviceAlertsResponse)
|
||||||
|
async def get_device_alerts(
|
||||||
|
device_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||||
|
):
|
||||||
|
"""Return the current active alert set for a device. Empty list means fully healthy."""
|
||||||
|
rows = await mqtt_db.get_alerts(device_id)
|
||||||
|
return DeviceAlertsResponse(alerts=[DeviceAlertEntry(**r) for r in rows])
|
||||||
|
|
||||||
|
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
# Device Notes
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.get("/{device_id}/notes")
|
||||||
|
async def list_device_notes(
|
||||||
|
device_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||||
|
):
|
||||||
|
"""List all notes for a device."""
|
||||||
|
db = get_firestore()
|
||||||
|
docs = db.collection(NOTES_COLLECTION).where("device_id", "==", device_id).order_by("created_at").stream()
|
||||||
|
notes = []
|
||||||
|
for doc in docs:
|
||||||
|
note = doc.to_dict()
|
||||||
|
note["id"] = doc.id
|
||||||
|
# Convert Firestore Timestamps to ISO strings
|
||||||
|
for f in ("created_at", "updated_at"):
|
||||||
|
if hasattr(note.get(f), "isoformat"):
|
||||||
|
note[f] = note[f].isoformat()
|
||||||
|
notes.append(note)
|
||||||
|
return {"notes": notes, "total": len(notes)}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{device_id}/notes", status_code=201)
|
||||||
|
async def create_device_note(
|
||||||
|
device_id: str,
|
||||||
|
body: DeviceNoteCreate,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Create a new note for a device."""
|
||||||
|
db = get_firestore()
|
||||||
|
now = datetime.utcnow()
|
||||||
|
note_id = str(uuid.uuid4())
|
||||||
|
note_data = {
|
||||||
|
"device_id": device_id,
|
||||||
|
"content": body.content,
|
||||||
|
"created_by": body.created_by or _user.name or "",
|
||||||
|
"created_at": now,
|
||||||
|
"updated_at": now,
|
||||||
|
}
|
||||||
|
db.collection(NOTES_COLLECTION).document(note_id).set(note_data)
|
||||||
|
note_data["id"] = note_id
|
||||||
|
note_data["created_at"] = now.isoformat()
|
||||||
|
note_data["updated_at"] = now.isoformat()
|
||||||
|
return note_data
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{device_id}/notes/{note_id}")
|
||||||
|
async def update_device_note(
|
||||||
|
device_id: str,
|
||||||
|
note_id: str,
|
||||||
|
body: DeviceNoteUpdate,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Update an existing device note."""
|
||||||
|
db = get_firestore()
|
||||||
|
doc_ref = db.collection(NOTES_COLLECTION).document(note_id)
|
||||||
|
doc = doc_ref.get()
|
||||||
|
if not doc.exists or doc.to_dict().get("device_id") != device_id:
|
||||||
|
raise HTTPException(status_code=404, detail="Note not found")
|
||||||
|
now = datetime.utcnow()
|
||||||
|
doc_ref.update({"content": body.content, "updated_at": now})
|
||||||
|
updated = doc.to_dict()
|
||||||
|
updated["id"] = note_id
|
||||||
|
updated["content"] = body.content
|
||||||
|
updated["updated_at"] = now.isoformat()
|
||||||
|
if hasattr(updated.get("created_at"), "isoformat"):
|
||||||
|
updated["created_at"] = updated["created_at"].isoformat()
|
||||||
|
return updated
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{device_id}/notes/{note_id}", status_code=204)
|
||||||
|
async def delete_device_note(
|
||||||
|
device_id: str,
|
||||||
|
note_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Delete a device note."""
|
||||||
|
db = get_firestore()
|
||||||
|
doc_ref = db.collection(NOTES_COLLECTION).document(note_id)
|
||||||
|
doc = doc_ref.get()
|
||||||
|
if not doc.exists or doc.to_dict().get("device_id") != device_id:
|
||||||
|
raise HTTPException(status_code=404, detail="Note not found")
|
||||||
|
doc_ref.delete()
|
||||||
|
|
||||||
|
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
# Device Tags
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class TagsUpdate(BaseModel):
|
||||||
|
tags: List[str]
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{device_id}/tags", response_model=DeviceInDB)
|
||||||
|
async def update_device_tags(
|
||||||
|
device_id: str,
|
||||||
|
body: TagsUpdate,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Replace the tags list for a device."""
|
||||||
|
return service.update_device(device_id, DeviceUpdate(tags=body.tags))
|
||||||
|
|
||||||
|
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
# Assign Device to Customer
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class CustomerSearchResult(BaseModel):
|
||||||
|
id: str
|
||||||
|
name: str
|
||||||
|
email: str
|
||||||
|
organization: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class AssignCustomerBody(BaseModel):
|
||||||
|
customer_id: str
|
||||||
|
label: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{device_id}/customer-search")
|
||||||
|
async def search_customers_for_device(
|
||||||
|
device_id: str,
|
||||||
|
q: str = Query(""),
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||||
|
):
|
||||||
|
"""Search customers by name, email, phone, org, or tags, returning top 20 matches."""
|
||||||
|
db = get_firestore()
|
||||||
|
docs = db.collection(CRM_COLLECTION).stream()
|
||||||
|
results = []
|
||||||
|
q_lower = q.lower().strip()
|
||||||
|
for doc in docs:
|
||||||
|
data = doc.to_dict()
|
||||||
|
name = data.get("name", "") or ""
|
||||||
|
surname = data.get("surname", "") or ""
|
||||||
|
email = data.get("email", "") or ""
|
||||||
|
organization = data.get("organization", "") or ""
|
||||||
|
phone = data.get("phone", "") or ""
|
||||||
|
tags = " ".join(data.get("tags", []) or [])
|
||||||
|
location = data.get("location") or {}
|
||||||
|
city = location.get("city", "") or ""
|
||||||
|
searchable = f"{name} {surname} {email} {organization} {phone} {tags} {city}".lower()
|
||||||
|
if not q_lower or q_lower in searchable:
|
||||||
|
results.append({
|
||||||
|
"id": doc.id,
|
||||||
|
"name": name,
|
||||||
|
"surname": surname,
|
||||||
|
"email": email,
|
||||||
|
"organization": organization,
|
||||||
|
"city": city,
|
||||||
|
})
|
||||||
|
if len(results) >= 20:
|
||||||
|
break
|
||||||
|
return {"results": results}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{device_id}/assign-customer")
|
||||||
|
async def assign_device_to_customer(
|
||||||
|
device_id: str,
|
||||||
|
body: AssignCustomerBody,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Assign a device to a customer.
|
||||||
|
|
||||||
|
- Sets owner field on the device document.
|
||||||
|
- Adds a console_device entry to the customer's owned_items list.
|
||||||
|
"""
|
||||||
|
db = get_firestore()
|
||||||
|
|
||||||
|
# Verify device exists
|
||||||
|
device = service.get_device(device_id)
|
||||||
|
|
||||||
|
# Get customer
|
||||||
|
customer_ref = db.collection(CRM_COLLECTION).document(body.customer_id)
|
||||||
|
customer_doc = customer_ref.get()
|
||||||
|
if not customer_doc.exists:
|
||||||
|
raise HTTPException(status_code=404, detail="Customer not found")
|
||||||
|
customer_data = customer_doc.to_dict()
|
||||||
|
customer_email = customer_data.get("email", "")
|
||||||
|
|
||||||
|
# Update device: owner email + customer_id
|
||||||
|
device_ref = db.collection("devices").document(device_id)
|
||||||
|
device_ref.update({"owner": customer_email, "customer_id": body.customer_id})
|
||||||
|
|
||||||
|
# Add to customer owned_items (avoid duplicates)
|
||||||
|
owned_items = customer_data.get("owned_items", []) or []
|
||||||
|
already_assigned = any(
|
||||||
|
item.get("type") == "console_device" and item.get("console_device", {}).get("device_id") == device_id
|
||||||
|
for item in owned_items
|
||||||
|
)
|
||||||
|
if not already_assigned:
|
||||||
|
owned_items.append({
|
||||||
|
"type": "console_device",
|
||||||
|
"console_device": {
|
||||||
|
"device_id": device_id,
|
||||||
|
"label": body.label or device.device_name or device_id,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
customer_ref.update({"owned_items": owned_items})
|
||||||
|
|
||||||
|
return {"status": "assigned", "device_id": device_id, "customer_id": body.customer_id}
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{device_id}/assign-customer", status_code=204)
|
||||||
|
async def unassign_device_from_customer(
|
||||||
|
device_id: str,
|
||||||
|
customer_id: str = Query(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Remove device assignment from a customer."""
|
||||||
|
db = get_firestore()
|
||||||
|
|
||||||
|
# Clear customer_id on device
|
||||||
|
device_ref = db.collection("devices").document(device_id)
|
||||||
|
device_ref.update({"customer_id": ""})
|
||||||
|
|
||||||
|
# Remove from customer owned_items
|
||||||
|
customer_ref = db.collection(CRM_COLLECTION).document(customer_id)
|
||||||
|
customer_doc = customer_ref.get()
|
||||||
|
if customer_doc.exists:
|
||||||
|
customer_data = customer_doc.to_dict()
|
||||||
|
owned_items = [
|
||||||
|
item for item in (customer_data.get("owned_items") or [])
|
||||||
|
if not (item.get("type") == "console_device" and item.get("console_device", {}).get("device_id") == device_id)
|
||||||
|
]
|
||||||
|
customer_ref.update({"owned_items": owned_items})
|
||||||
|
|
||||||
|
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
# Customer detail (for Owner display in fleet)
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.get("/{device_id}/customer")
|
||||||
|
async def get_device_customer(
|
||||||
|
device_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||||
|
):
|
||||||
|
"""Return basic customer details for a device's assigned customer_id."""
|
||||||
|
db = get_firestore()
|
||||||
|
device_ref = db.collection("devices").document(device_id)
|
||||||
|
device_doc = device_ref.get()
|
||||||
|
if not device_doc.exists:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
device_data = device_doc.to_dict() or {}
|
||||||
|
customer_id = device_data.get("customer_id")
|
||||||
|
if not customer_id:
|
||||||
|
return {"customer": None}
|
||||||
|
customer_doc = db.collection(CRM_COLLECTION).document(customer_id).get()
|
||||||
|
if not customer_doc.exists:
|
||||||
|
return {"customer": None}
|
||||||
|
cd = customer_doc.to_dict() or {}
|
||||||
|
return {
|
||||||
|
"customer": {
|
||||||
|
"id": customer_doc.id,
|
||||||
|
"name": cd.get("name") or "",
|
||||||
|
"email": cd.get("email") or "",
|
||||||
|
"organization": cd.get("organization") or "",
|
||||||
|
"phone": cd.get("phone") or "",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
# User list management (for Manage tab — assign/remove users from user_list)
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class UserSearchResult(BaseModel):
|
||||||
|
id: str
|
||||||
|
display_name: str = ""
|
||||||
|
email: str = ""
|
||||||
|
phone: str = ""
|
||||||
|
photo_url: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{device_id}/user-search")
|
||||||
|
async def search_users_for_device(
|
||||||
|
device_id: str,
|
||||||
|
q: str = Query(""),
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||||
|
):
|
||||||
|
"""Search the users collection by name, email, or phone."""
|
||||||
|
db = get_firestore()
|
||||||
|
docs = db.collection("users").stream()
|
||||||
|
results = []
|
||||||
|
q_lower = q.lower().strip()
|
||||||
|
for doc in docs:
|
||||||
|
data = doc.to_dict() or {}
|
||||||
|
name = (data.get("display_name") or "").lower()
|
||||||
|
email = (data.get("email") or "").lower()
|
||||||
|
phone = (data.get("phone") or "").lower()
|
||||||
|
if not q_lower or q_lower in name or q_lower in email or q_lower in phone:
|
||||||
|
results.append({
|
||||||
|
"id": doc.id,
|
||||||
|
"display_name": data.get("display_name") or "",
|
||||||
|
"email": data.get("email") or "",
|
||||||
|
"phone": data.get("phone") or "",
|
||||||
|
"photo_url": data.get("photo_url") or "",
|
||||||
|
})
|
||||||
|
if len(results) >= 20:
|
||||||
|
break
|
||||||
|
return {"results": results}
|
||||||
|
|
||||||
|
|
||||||
|
class AddUserBody(BaseModel):
|
||||||
|
user_id: str
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{device_id}/user-list", status_code=200)
|
||||||
|
async def add_user_to_device(
|
||||||
|
device_id: str,
|
||||||
|
body: AddUserBody,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Add a user reference to the device's user_list field."""
|
||||||
|
db = get_firestore()
|
||||||
|
device_ref = db.collection("devices").document(device_id)
|
||||||
|
device_doc = device_ref.get()
|
||||||
|
if not device_doc.exists:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
|
||||||
|
# Verify user exists
|
||||||
|
user_doc = db.collection("users").document(body.user_id).get()
|
||||||
|
if not user_doc.exists:
|
||||||
|
raise HTTPException(status_code=404, detail="User not found")
|
||||||
|
|
||||||
|
data = device_doc.to_dict() or {}
|
||||||
|
user_list = data.get("user_list", []) or []
|
||||||
|
|
||||||
|
# Avoid duplicates — check both string paths and DocumentReferences
|
||||||
|
from google.cloud.firestore_v1 import DocumentReference as DocRef
|
||||||
|
existing_ids = set()
|
||||||
|
for entry in user_list:
|
||||||
|
if isinstance(entry, DocRef):
|
||||||
|
existing_ids.add(entry.id)
|
||||||
|
elif isinstance(entry, str):
|
||||||
|
existing_ids.add(entry.split("/")[-1])
|
||||||
|
|
||||||
|
if body.user_id not in existing_ids:
|
||||||
|
user_ref = db.collection("users").document(body.user_id)
|
||||||
|
user_list.append(user_ref)
|
||||||
|
device_ref.update({"user_list": user_list})
|
||||||
|
|
||||||
|
return {"status": "added", "user_id": body.user_id}
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{device_id}/user-list/{user_id}", status_code=200)
|
||||||
|
async def remove_user_from_device(
|
||||||
|
device_id: str,
|
||||||
|
user_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||||
|
):
|
||||||
|
"""Remove a user reference from the device's user_list field."""
|
||||||
|
db = get_firestore()
|
||||||
|
device_ref = db.collection("devices").document(device_id)
|
||||||
|
device_doc = device_ref.get()
|
||||||
|
if not device_doc.exists:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
|
||||||
|
data = device_doc.to_dict() or {}
|
||||||
|
user_list = data.get("user_list", []) or []
|
||||||
|
|
||||||
|
# Remove any entry that resolves to this user_id
|
||||||
|
new_list = [
|
||||||
|
entry for entry in user_list
|
||||||
|
if not (isinstance(entry, str) and entry.split("/")[-1] == user_id)
|
||||||
|
]
|
||||||
|
device_ref.update({"user_list": new_list})
|
||||||
|
|
||||||
|
return {"status": "removed", "user_id": user_id}
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ from google.cloud.firestore_v1 import GeoPoint, DocumentReference
|
|||||||
from shared.firebase import get_db
|
from shared.firebase import get_db
|
||||||
from shared.exceptions import NotFoundError
|
from shared.exceptions import NotFoundError
|
||||||
from devices.models import DeviceCreate, DeviceUpdate, DeviceInDB
|
from devices.models import DeviceCreate, DeviceUpdate, DeviceInDB
|
||||||
from mqtt.mosquitto import register_device_password
|
|
||||||
|
|
||||||
COLLECTION = "devices"
|
COLLECTION = "devices"
|
||||||
|
|
||||||
@@ -15,6 +14,33 @@ COLLECTION = "devices"
|
|||||||
SN_CHARS = string.ascii_uppercase + string.digits
|
SN_CHARS = string.ascii_uppercase + string.digits
|
||||||
SN_SEGMENT_LEN = 4
|
SN_SEGMENT_LEN = 4
|
||||||
|
|
||||||
|
# Clock/silence/backlight fields stored as Firestore Timestamps (written as datetime)
|
||||||
|
_TIMESTAMP_FIELD_NAMES = {
|
||||||
|
"daySilenceFrom", "daySilenceTo",
|
||||||
|
"nightSilenceFrom", "nightSilenceTo",
|
||||||
|
"backlightTurnOnTime", "backlightTurnOffTime",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _restore_timestamps(d: dict) -> dict:
|
||||||
|
"""Recursively convert ISO 8601 strings for known timestamp fields to datetime objects.
|
||||||
|
|
||||||
|
Firestore stores Python datetime objects as native Timestamps, which Flutter
|
||||||
|
reads as DateTime. Plain strings would break the Flutter app.
|
||||||
|
"""
|
||||||
|
result = {}
|
||||||
|
for k, v in d.items():
|
||||||
|
if isinstance(v, dict):
|
||||||
|
result[k] = _restore_timestamps(v)
|
||||||
|
elif isinstance(v, str) and k in _TIMESTAMP_FIELD_NAMES:
|
||||||
|
try:
|
||||||
|
result[k] = datetime.fromisoformat(v.replace("Z", "+00:00"))
|
||||||
|
except ValueError:
|
||||||
|
result[k] = v
|
||||||
|
else:
|
||||||
|
result[k] = v
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def _generate_serial_number() -> str:
|
def _generate_serial_number() -> str:
|
||||||
"""Generate a unique serial number in the format BS-XXXX-XXXX."""
|
"""Generate a unique serial number in the format BS-XXXX-XXXX."""
|
||||||
@@ -26,10 +52,11 @@ def _generate_serial_number() -> str:
|
|||||||
def _ensure_unique_serial(db) -> str:
|
def _ensure_unique_serial(db) -> str:
|
||||||
"""Generate a serial number and verify it doesn't already exist in Firestore."""
|
"""Generate a serial number and verify it doesn't already exist in Firestore."""
|
||||||
existing_sns = set()
|
existing_sns = set()
|
||||||
for doc in db.collection(COLLECTION).select(["device_id"]).stream():
|
for doc in db.collection(COLLECTION).select(["serial_number"]).stream():
|
||||||
data = doc.to_dict()
|
data = doc.to_dict()
|
||||||
if data.get("device_id"):
|
sn = data.get("serial_number") or data.get("device_id")
|
||||||
existing_sns.add(data["device_id"])
|
if sn:
|
||||||
|
existing_sns.add(sn)
|
||||||
|
|
||||||
for _ in range(100): # safety limit
|
for _ in range(100): # safety limit
|
||||||
sn = _generate_serial_number()
|
sn = _generate_serial_number()
|
||||||
@@ -45,7 +72,7 @@ def _convert_firestore_value(val):
|
|||||||
# Firestore DatetimeWithNanoseconds is a datetime subclass
|
# Firestore DatetimeWithNanoseconds is a datetime subclass
|
||||||
return val.strftime("%d %B %Y at %H:%M:%S UTC%z")
|
return val.strftime("%d %B %Y at %H:%M:%S UTC%z")
|
||||||
if isinstance(val, GeoPoint):
|
if isinstance(val, GeoPoint):
|
||||||
return f"{val.latitude}° N, {val.longitude}° E"
|
return {"lat": val.latitude, "lng": val.longitude}
|
||||||
if isinstance(val, DocumentReference):
|
if isinstance(val, DocumentReference):
|
||||||
# Store the document path (e.g. "users/abc123")
|
# Store the document path (e.g. "users/abc123")
|
||||||
return val.path
|
return val.path
|
||||||
@@ -69,18 +96,40 @@ def _sanitize_dict(d: dict) -> dict:
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def _auto_upgrade_claimed(doc_ref, data: dict) -> dict:
|
||||||
|
"""If the device has entries in user_list and isn't already claimed/decommissioned,
|
||||||
|
upgrade mfg_status to 'claimed' automatically and return the updated data dict."""
|
||||||
|
current_status = data.get("mfg_status", "")
|
||||||
|
if current_status in ("claimed", "decommissioned"):
|
||||||
|
return data
|
||||||
|
user_list = data.get("user_list", []) or []
|
||||||
|
if user_list:
|
||||||
|
doc_ref.update({"mfg_status": "claimed"})
|
||||||
|
data = dict(data)
|
||||||
|
data["mfg_status"] = "claimed"
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
def _doc_to_device(doc) -> DeviceInDB:
|
def _doc_to_device(doc) -> DeviceInDB:
|
||||||
"""Convert a Firestore document snapshot to a DeviceInDB model."""
|
"""Convert a Firestore document snapshot to a DeviceInDB model.
|
||||||
data = _sanitize_dict(doc.to_dict())
|
|
||||||
|
Also auto-upgrades mfg_status to 'claimed' if user_list is non-empty.
|
||||||
|
"""
|
||||||
|
raw = doc.to_dict()
|
||||||
|
raw = _auto_upgrade_claimed(doc.reference, raw)
|
||||||
|
data = _sanitize_dict(raw)
|
||||||
return DeviceInDB(id=doc.id, **data)
|
return DeviceInDB(id=doc.id, **data)
|
||||||
|
|
||||||
|
|
||||||
|
FLEET_STATUSES = {"sold", "claimed"}
|
||||||
|
|
||||||
|
|
||||||
def list_devices(
|
def list_devices(
|
||||||
search: str | None = None,
|
search: str | None = None,
|
||||||
online_only: bool | None = None,
|
online_only: bool | None = None,
|
||||||
subscription_tier: str | None = None,
|
subscription_tier: str | None = None,
|
||||||
) -> list[DeviceInDB]:
|
) -> list[DeviceInDB]:
|
||||||
"""List devices with optional filters."""
|
"""List fleet devices (sold + claimed only) with optional filters."""
|
||||||
db = get_db()
|
db = get_db()
|
||||||
ref = db.collection(COLLECTION)
|
ref = db.collection(COLLECTION)
|
||||||
query = ref
|
query = ref
|
||||||
@@ -92,6 +141,14 @@ def list_devices(
|
|||||||
results = []
|
results = []
|
||||||
|
|
||||||
for doc in docs:
|
for doc in docs:
|
||||||
|
raw = doc.to_dict() or {}
|
||||||
|
|
||||||
|
# Only include sold/claimed devices in the fleet view.
|
||||||
|
# Legacy devices without mfg_status are included to avoid breaking old data.
|
||||||
|
mfg_status = raw.get("mfg_status")
|
||||||
|
if mfg_status and mfg_status not in FLEET_STATUSES:
|
||||||
|
continue
|
||||||
|
|
||||||
device = _doc_to_device(doc)
|
device = _doc_to_device(doc)
|
||||||
|
|
||||||
# Client-side filters
|
# Client-side filters
|
||||||
@@ -102,7 +159,7 @@ def list_devices(
|
|||||||
search_lower = search.lower()
|
search_lower = search.lower()
|
||||||
name_match = search_lower in (device.device_name or "").lower()
|
name_match = search_lower in (device.device_name or "").lower()
|
||||||
location_match = search_lower in (device.device_location or "").lower()
|
location_match = search_lower in (device.device_location or "").lower()
|
||||||
sn_match = search_lower in (device.device_id or "").lower()
|
sn_match = search_lower in (device.serial_number or "").lower()
|
||||||
if not (name_match or location_match or sn_match):
|
if not (name_match or location_match or sn_match):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -127,10 +184,6 @@ def create_device(data: DeviceCreate) -> DeviceInDB:
|
|||||||
# Generate unique serial number
|
# Generate unique serial number
|
||||||
serial_number = _ensure_unique_serial(db)
|
serial_number = _ensure_unique_serial(db)
|
||||||
|
|
||||||
# Generate MQTT password and register with Mosquitto
|
|
||||||
mqtt_password = secrets.token_urlsafe(24)
|
|
||||||
register_device_password(serial_number, mqtt_password)
|
|
||||||
|
|
||||||
doc_data = data.model_dump()
|
doc_data = data.model_dump()
|
||||||
doc_data["device_id"] = serial_number
|
doc_data["device_id"] = serial_number
|
||||||
|
|
||||||
@@ -139,6 +192,17 @@ def create_device(data: DeviceCreate) -> DeviceInDB:
|
|||||||
return DeviceInDB(id=doc_ref.id, **doc_data)
|
return DeviceInDB(id=doc_ref.id, **doc_data)
|
||||||
|
|
||||||
|
|
||||||
|
def _deep_merge(base: dict, overrides: dict) -> dict:
|
||||||
|
"""Recursively merge overrides into base, preserving unmentioned nested keys."""
|
||||||
|
result = dict(base)
|
||||||
|
for k, v in overrides.items():
|
||||||
|
if isinstance(v, dict) and isinstance(result.get(k), dict):
|
||||||
|
result[k] = _deep_merge(result[k], v)
|
||||||
|
else:
|
||||||
|
result[k] = v
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def update_device(device_doc_id: str, data: DeviceUpdate) -> DeviceInDB:
|
def update_device(device_doc_id: str, data: DeviceUpdate) -> DeviceInDB:
|
||||||
"""Update an existing device document. Only provided fields are updated."""
|
"""Update an existing device document. Only provided fields are updated."""
|
||||||
db = get_db()
|
db = get_db()
|
||||||
@@ -149,16 +213,21 @@ def update_device(device_doc_id: str, data: DeviceUpdate) -> DeviceInDB:
|
|||||||
|
|
||||||
update_data = data.model_dump(exclude_none=True)
|
update_data = data.model_dump(exclude_none=True)
|
||||||
|
|
||||||
# For nested structs, merge with existing data rather than replacing
|
# Convert {lat, lng} dict to a Firestore GeoPoint
|
||||||
|
coords = update_data.get("device_location_coordinates")
|
||||||
|
if isinstance(coords, dict) and "lat" in coords and "lng" in coords:
|
||||||
|
update_data["device_location_coordinates"] = GeoPoint(coords["lat"], coords["lng"])
|
||||||
|
|
||||||
|
# Deep-merge nested structs so unmentioned sub-fields are preserved
|
||||||
existing = doc.to_dict()
|
existing = doc.to_dict()
|
||||||
nested_keys = (
|
nested_keys = (
|
||||||
"device_attributes", "device_subscription", "device_stats",
|
"device_attributes", "device_subscription", "device_stats",
|
||||||
)
|
)
|
||||||
for key in nested_keys:
|
for key in nested_keys:
|
||||||
if key in update_data and key in existing:
|
if key in update_data and isinstance(existing.get(key), dict):
|
||||||
merged = {**existing[key], **update_data[key]}
|
update_data[key] = _deep_merge(existing[key], update_data[key])
|
||||||
update_data[key] = merged
|
|
||||||
|
|
||||||
|
update_data = _restore_timestamps(update_data)
|
||||||
doc_ref.update(update_data)
|
doc_ref.update(update_data)
|
||||||
|
|
||||||
updated_doc = doc_ref.get()
|
updated_doc = doc_ref.get()
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ from shared.firebase import get_db
|
|||||||
from shared.exceptions import NotFoundError
|
from shared.exceptions import NotFoundError
|
||||||
from equipment.models import NoteCreate, NoteUpdate, NoteInDB
|
from equipment.models import NoteCreate, NoteUpdate, NoteInDB
|
||||||
|
|
||||||
COLLECTION = "equipment_notes"
|
COLLECTION = "notes"
|
||||||
|
|
||||||
VALID_CATEGORIES = {"general", "maintenance", "installation", "issue", "action_item", "other"}
|
VALID_CATEGORIES = {"general", "maintenance", "installation", "issue", "action_item", "other"}
|
||||||
|
|
||||||
|
|||||||
0
backend/firmware/__init__.py
Normal file
66
backend/firmware/models.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import Optional, List
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class UpdateType(str, Enum):
|
||||||
|
optional = "optional" # user-initiated only
|
||||||
|
mandatory = "mandatory" # auto-installs on next reboot
|
||||||
|
emergency = "emergency" # auto-installs on reboot + daily check + MQTT push
|
||||||
|
|
||||||
|
|
||||||
|
class FirmwareVersion(BaseModel):
|
||||||
|
id: str
|
||||||
|
hw_type: str # e.g. "vesper", "vesper_plus", "vesper_pro", "bespoke"
|
||||||
|
channel: str # "stable", "beta", "alpha", "testing"
|
||||||
|
version: str # semver e.g. "1.5"
|
||||||
|
filename: str
|
||||||
|
size_bytes: int
|
||||||
|
sha256: str
|
||||||
|
update_type: UpdateType = UpdateType.mandatory
|
||||||
|
min_fw_version: Optional[str] = None # minimum fw version required to install this
|
||||||
|
uploaded_at: str
|
||||||
|
changelog: Optional[str] = None
|
||||||
|
release_note: Optional[str] = None
|
||||||
|
is_latest: bool = False
|
||||||
|
bespoke_uid: Optional[str] = None # only set when hw_type == "bespoke"
|
||||||
|
|
||||||
|
|
||||||
|
class FirmwareListResponse(BaseModel):
|
||||||
|
firmware: List[FirmwareVersion]
|
||||||
|
total: int
|
||||||
|
|
||||||
|
|
||||||
|
class FirmwareMetadataResponse(BaseModel):
|
||||||
|
"""Returned by both /latest and /{version}/info endpoints.
|
||||||
|
|
||||||
|
Two orthogonal axes:
|
||||||
|
channel — the release track the device is subscribed to
|
||||||
|
("stable" | "beta" | "development")
|
||||||
|
Firmware validates this matches the channel it requested.
|
||||||
|
update_type — the urgency of THIS release, set by the publisher
|
||||||
|
("optional" | "mandatory" | "emergency")
|
||||||
|
Firmware reads mandatory/emergency booleans derived from this.
|
||||||
|
|
||||||
|
Additional firmware-compatible fields:
|
||||||
|
size — binary size in bytes (firmware reads "size", not "size_bytes")
|
||||||
|
mandatory — True when update_type is mandatory or emergency
|
||||||
|
emergency — True only when update_type is emergency
|
||||||
|
"""
|
||||||
|
hw_type: str
|
||||||
|
channel: str # release track — firmware validates this
|
||||||
|
version: str
|
||||||
|
size: int # firmware reads "size"
|
||||||
|
size_bytes: int # kept for admin-panel consumers
|
||||||
|
sha256: str
|
||||||
|
update_type: UpdateType # urgency enum — for admin panel display
|
||||||
|
mandatory: bool # derived: update_type in (mandatory, emergency)
|
||||||
|
emergency: bool # derived: update_type == emergency
|
||||||
|
min_fw_version: Optional[str] = None
|
||||||
|
download_url: str
|
||||||
|
uploaded_at: str
|
||||||
|
release_note: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
# Keep backwards-compatible alias
|
||||||
|
FirmwareLatestResponse = FirmwareMetadataResponse
|
||||||
180
backend/firmware/router.py
Normal file
@@ -0,0 +1,180 @@
|
|||||||
|
from fastapi import APIRouter, Depends, Query, UploadFile, File, Form, HTTPException
|
||||||
|
from fastapi.responses import FileResponse, PlainTextResponse
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import Optional
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from firmware.models import FirmwareVersion, FirmwareListResponse, FirmwareMetadataResponse, UpdateType
|
||||||
|
from firmware import service
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/firmware", tags=["firmware"])
|
||||||
|
ota_router = APIRouter(prefix="/api/ota", tags=["ota-telemetry"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/upload", response_model=FirmwareVersion, status_code=201)
|
||||||
|
async def upload_firmware(
|
||||||
|
hw_type: str = Form(...),
|
||||||
|
channel: str = Form(...),
|
||||||
|
version: str = Form(...),
|
||||||
|
update_type: UpdateType = Form(UpdateType.mandatory),
|
||||||
|
min_fw_version: Optional[str] = Form(None),
|
||||||
|
changelog: Optional[str] = Form(None),
|
||||||
|
release_note: Optional[str] = Form(None),
|
||||||
|
bespoke_uid: Optional[str] = Form(None),
|
||||||
|
file: UploadFile = File(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "add")),
|
||||||
|
):
|
||||||
|
file_bytes = await file.read()
|
||||||
|
return service.upload_firmware(
|
||||||
|
hw_type=hw_type,
|
||||||
|
channel=channel,
|
||||||
|
version=version,
|
||||||
|
file_bytes=file_bytes,
|
||||||
|
update_type=update_type,
|
||||||
|
min_fw_version=min_fw_version,
|
||||||
|
changelog=changelog,
|
||||||
|
release_note=release_note,
|
||||||
|
bespoke_uid=bespoke_uid,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", response_model=FirmwareListResponse)
def list_firmware(
    hw_type: Optional[str] = Query(None),
    channel: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("manufacturing", "view")),
):
    """List firmware versions, optionally filtered by hw_type and/or channel."""
    versions = service.list_firmware(hw_type=hw_type, channel=channel)
    return FirmwareListResponse(firmware=versions, total=len(versions))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{hw_type}/{channel}/latest", response_model=FirmwareMetadataResponse)
def get_latest_firmware(
    hw_type: str,
    channel: str,
    hw_version: Optional[str] = Query(None, description="Hardware revision from NVS, e.g. '1.0'"),
    current_version: Optional[str] = Query(None, description="Currently running firmware semver, e.g. '1.2.3'"),
):
    """Returns metadata for the latest firmware for a given hw_type + channel.

    No auth required — devices call this endpoint to check for updates.
    """
    return service.get_latest(
        hw_type,
        channel,
        hw_version=hw_version,
        current_version=current_version,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{hw_type}/{channel}/latest/changelog", response_class=PlainTextResponse)
def get_latest_changelog(hw_type: str, channel: str):
    """Plain-text changelog of the latest firmware for hw_type + channel."""
    return service.get_latest_changelog(hw_type, channel)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{hw_type}/{channel}/{version}/info/changelog", response_class=PlainTextResponse)
def get_version_changelog(hw_type: str, channel: str, version: str):
    """Plain-text changelog of one specific firmware version."""
    return service.get_version_changelog(hw_type, channel, version)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{hw_type}/{channel}/{version}/info", response_model=FirmwareMetadataResponse)
def get_firmware_info(hw_type: str, channel: str, version: str):
    """Returns metadata for a specific firmware version.

    No auth required — devices call this to resolve upgrade chains.
    """
    return service.get_version_info(hw_type, channel, version)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{hw_type}/{channel}/{version}/firmware.bin")
def download_firmware(hw_type: str, channel: str, version: str):
    """Download the firmware binary. No auth required — devices call this directly."""
    binary_path = service.get_firmware_path(hw_type, channel, version)
    return FileResponse(
        path=str(binary_path),
        filename="firmware.bin",
        media_type="application/octet-stream",
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{firmware_id}", response_model=FirmwareVersion)
async def edit_firmware(
    firmware_id: str,
    channel: Optional[str] = Form(None),
    version: Optional[str] = Form(None),
    update_type: Optional[UpdateType] = Form(None),
    min_fw_version: Optional[str] = Form(None),
    changelog: Optional[str] = Form(None),
    release_note: Optional[str] = Form(None),
    bespoke_uid: Optional[str] = Form(None),
    file: Optional[UploadFile] = File(None),
    _user: TokenPayload = Depends(require_permission("manufacturing", "add")),
):
    """Edit metadata of an existing firmware and optionally replace its binary.

    Only form fields that were submitted are applied; the binary is replaced
    only when a non-empty file upload is present.
    """
    replacement = None
    if file and file.filename:
        replacement = await file.read()
    return service.edit_firmware(
        doc_id=firmware_id,
        channel=channel,
        version=version,
        update_type=update_type,
        min_fw_version=min_fw_version,
        changelog=changelog,
        release_note=release_note,
        bespoke_uid=bespoke_uid,
        file_bytes=replacement,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{firmware_id}", status_code=204)
def delete_firmware(
    firmware_id: str,
    _user: TokenPayload = Depends(require_permission("manufacturing", "delete")),
):
    """Delete a firmware version (Firestore doc and stored binary)."""
    service.delete_firmware(firmware_id)
|
||||||
|
|
||||||
|
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
# OTA event telemetry — called by devices (no auth, best-effort)
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class OtaDownloadEvent(BaseModel):
    """Telemetry payload sent by a device after downloading a firmware image."""

    device_uid: str
    hw_type: str
    hw_version: str
    from_version: str
    to_version: str
    channel: str
|
||||||
|
|
||||||
|
|
||||||
|
class OtaFlashEvent(BaseModel):
    """Telemetry payload sent by a device after committing a firmware image.

    Identical to OtaDownloadEvent plus the sha256 of the flashed image.
    """

    device_uid: str
    hw_type: str
    hw_version: str
    from_version: str
    to_version: str
    channel: str
    sha256: str
|
||||||
|
|
||||||
|
|
||||||
|
@ota_router.post("/events/download", status_code=204)
def ota_event_download(event: OtaDownloadEvent):
    """Device reports that firmware was fully written to flash (pre-commit).

    No auth required — best-effort telemetry from the device.
    """
    logger.info(
        "OTA download event: device=%s hw=%s/%s %s → %s (channel=%s)",
        event.device_uid,
        event.hw_type,
        event.hw_version,
        event.from_version,
        event.to_version,
        event.channel,
    )
    service.record_ota_event("download", event.model_dump())
|
||||||
|
|
||||||
|
|
||||||
|
@ota_router.post("/events/flash", status_code=204)
def ota_event_flash(event: OtaFlashEvent):
    """Device reports that firmware partition was committed and device is rebooting.

    No auth required — best-effort telemetry from the device.
    """
    logger.info(
        "OTA flash event: device=%s hw=%s/%s %s → %s (channel=%s sha256=%.16s...)",
        event.device_uid,
        event.hw_type,
        event.hw_version,
        event.from_version,
        event.to_version,
        event.channel,
        event.sha256,
    )
    service.record_ota_event("flash", event.model_dump())
|
||||||
398
backend/firmware/service.py
Normal file
@@ -0,0 +1,398 @@
|
|||||||
|
import hashlib
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from config import settings
|
||||||
|
from shared.firebase import get_db
|
||||||
|
from shared.exceptions import NotFoundError
|
||||||
|
from firmware.models import FirmwareVersion, FirmwareMetadataResponse, UpdateType
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Firestore collection that holds one document per uploaded firmware build.
COLLECTION = "firmware_versions"

# Accepted hardware identifiers; "bespoke" builds are keyed by bespoke_uid
# and excluded from the auto-update flow.
VALID_HW_TYPES = {"vesper", "vesper_plus", "vesper_pro", "chronos", "chronos_pro", "agnus", "agnus_mini", "bespoke"}

# Release channels a device may subscribe to.
VALID_CHANNELS = {"stable", "beta", "alpha", "testing"}
|
||||||
|
|
||||||
|
|
||||||
|
def _storage_path(hw_type: str, channel: str, version: str) -> Path:
    """On-disk location of a firmware binary: <root>/<hw_type>/<channel>/<version>/firmware.bin."""
    root = Path(settings.firmware_storage_path)
    return root / hw_type / channel / version / "firmware.bin"
|
||||||
|
|
||||||
|
|
||||||
|
def _doc_to_firmware_version(doc) -> FirmwareVersion:
    """Map a Firestore document snapshot onto the FirmwareVersion model.

    uploaded_at may be stored as a Firestore datetime or as a string; it is
    normalised to an ISO-8601 "Z" string (empty string when absent).
    """
    data = doc.to_dict() or {}

    raw_uploaded = data.get("uploaded_at")
    if isinstance(raw_uploaded, datetime):
        uploaded_str = raw_uploaded.strftime("%Y-%m-%dT%H:%M:%SZ")
    elif raw_uploaded:
        uploaded_str = str(raw_uploaded)
    else:
        uploaded_str = ""

    return FirmwareVersion(
        id=doc.id,
        hw_type=data.get("hw_type", ""),
        channel=data.get("channel", ""),
        version=data.get("version", ""),
        filename=data.get("filename", "firmware.bin"),
        size_bytes=data.get("size_bytes", 0),
        sha256=data.get("sha256", ""),
        update_type=data.get("update_type", UpdateType.mandatory),
        min_fw_version=data.get("min_fw_version"),
        uploaded_at=uploaded_str,
        changelog=data.get("changelog"),
        release_note=data.get("release_note"),
        is_latest=data.get("is_latest", False),
        bespoke_uid=data.get("bespoke_uid"),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _fw_to_metadata_response(fw: FirmwareVersion) -> FirmwareMetadataResponse:
    """Build the device-facing metadata payload for one firmware version.

    The response carries both the urgency enum (for the admin panel) and the
    derived mandatory/emergency booleans the firmware actually reads.
    """
    emergency = fw.update_type == UpdateType.emergency
    # An emergency build is implicitly mandatory as well.
    mandatory = emergency or fw.update_type == UpdateType.mandatory

    return FirmwareMetadataResponse(
        hw_type=fw.hw_type,
        channel=fw.channel,  # firmware validates this matches requested channel
        version=fw.version,
        size=fw.size_bytes,  # firmware reads "size"
        size_bytes=fw.size_bytes,  # kept for admin-panel consumers
        sha256=fw.sha256,
        update_type=fw.update_type,  # urgency enum — for admin panel display
        mandatory=mandatory,  # firmware reads this to decide auto-apply
        emergency=emergency,  # firmware reads this to decide immediate apply
        min_fw_version=fw.min_fw_version,
        download_url=f"/api/firmware/{fw.hw_type}/{fw.channel}/{fw.version}/firmware.bin",
        uploaded_at=fw.uploaded_at,
        release_note=fw.release_note,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _purge_existing_bespoke(db, bespoke_uid: str, fallback_channel: str, fallback_version: str) -> None:
    """Delete any previous bespoke firmware (Firestore doc + binary) with this bespoke_uid.

    Each bespoke_uid maps to exactly one firmware, so a re-upload overwrites
    the old one. The fallback channel/version are used only if the old doc is
    missing those fields.
    """
    existing_docs = (
        db.collection(COLLECTION)
        .where("hw_type", "==", "bespoke")
        .where("bespoke_uid", "==", bespoke_uid)
        .stream()
    )
    for old_doc in existing_docs:
        old_data = old_doc.to_dict() or {}
        old_path = _storage_path(
            "bespoke",
            old_data.get("channel", fallback_channel),
            old_data.get("version", fallback_version),
        )
        if old_path.exists():
            old_path.unlink()
            try:
                old_path.parent.rmdir()  # drop the version dir if now empty
            except OSError:
                pass
        old_doc.reference.delete()


def _demote_previous_latest(db, hw_type: str, channel: str) -> None:
    """Clear is_latest on whatever currently holds it for hw_type + channel."""
    prev_docs = (
        db.collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("is_latest", "==", True)
        .stream()
    )
    for prev in prev_docs:
        prev.reference.update({"is_latest": False})


def upload_firmware(
    hw_type: str,
    channel: str,
    version: str,
    file_bytes: bytes,
    update_type: UpdateType = UpdateType.mandatory,
    min_fw_version: str | None = None,
    changelog: str | None = None,
    release_note: str | None = None,
    bespoke_uid: str | None = None,
) -> FirmwareVersion:
    """Store a firmware binary on disk and register it in Firestore.

    Validates hw_type/channel, rejects empty uploads, overwrites an existing
    bespoke firmware with the same bespoke_uid, and (for non-bespoke types)
    marks this build as the new latest for hw_type + channel.

    Raises HTTPException(400) on invalid input.
    """
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type. Must be one of: {', '.join(sorted(VALID_HW_TYPES))}")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel. Must be one of: {', '.join(sorted(VALID_CHANNELS))}")
    if hw_type == "bespoke" and not bespoke_uid:
        raise HTTPException(status_code=400, detail="bespoke_uid is required when hw_type is 'bespoke'")
    # An empty upload can only be a client error; reject before touching disk.
    if not file_bytes:
        raise HTTPException(status_code=400, detail="Uploaded firmware file is empty")

    db = get_db()
    sha256 = hashlib.sha256(file_bytes).hexdigest()
    now = datetime.now(timezone.utc)

    if hw_type == "bespoke" and bespoke_uid:
        _purge_existing_bespoke(db, bespoke_uid, channel, version)

    dest = _storage_path(hw_type, channel, version)
    dest.parent.mkdir(parents=True, exist_ok=True)
    dest.write_bytes(file_bytes)

    # Demote the previous latest (skip for bespoke — each bespoke_uid is its
    # own independent firmware).
    if hw_type != "bespoke":
        _demote_previous_latest(db, hw_type, channel)

    doc_ref = db.collection(COLLECTION).document(str(uuid.uuid4()))
    doc_ref.set({
        "hw_type": hw_type,
        "channel": channel,
        "version": version,
        "filename": "firmware.bin",
        "size_bytes": len(file_bytes),
        "sha256": sha256,
        "update_type": update_type.value,
        "min_fw_version": min_fw_version,
        "uploaded_at": now,
        "changelog": changelog,
        "release_note": release_note,
        "is_latest": True,
        "bespoke_uid": bespoke_uid,
    })

    return _doc_to_firmware_version(doc_ref.get())
|
||||||
|
|
||||||
|
|
||||||
|
def list_firmware(
    hw_type: str | None = None,
    channel: str | None = None,
) -> list[FirmwareVersion]:
    """List firmware versions, optionally filtered, sorted newest-first."""
    query = get_db().collection(COLLECTION)
    if hw_type:
        query = query.where("hw_type", "==", hw_type)
    if channel:
        query = query.where("channel", "==", channel)

    versions = [_doc_to_firmware_version(doc) for doc in query.stream()]
    return sorted(versions, key=lambda fw: fw.uploaded_at, reverse=True)
|
||||||
|
|
||||||
|
|
||||||
|
def get_latest(hw_type: str, channel: str, hw_version: str | None = None, current_version: str | None = None) -> FirmwareMetadataResponse:
    """Metadata of the firmware currently flagged is_latest for hw_type + channel.

    Bespoke firmware is never served here — it is only available via its
    direct download URL.

    NOTE(review): hw_version and current_version are accepted but not used
    for filtering in this implementation — presumably reserved for future
    compatibility gating; confirm intent.
    """
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if hw_type == "bespoke":
        raise HTTPException(status_code=400, detail="Bespoke firmware is not served via auto-update. Use the direct download URL.")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")

    matches = list(
        get_db()
        .collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("is_latest", "==", True)
        .limit(1)
        .stream()
    )
    if not matches:
        raise NotFoundError("Firmware")

    return _fw_to_metadata_response(_doc_to_firmware_version(matches[0]))
|
||||||
|
|
||||||
|
|
||||||
|
def get_version_info(hw_type: str, channel: str, version: str) -> FirmwareMetadataResponse:
    """Fetch metadata for a specific version. Used by devices resolving upgrade chains."""
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")

    matches = list(
        get_db()
        .collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("version", "==", version)
        .limit(1)
        .stream()
    )
    if not matches:
        raise NotFoundError("Firmware version")

    return _fw_to_metadata_response(_doc_to_firmware_version(matches[0]))
|
||||||
|
|
||||||
|
|
||||||
|
def get_latest_changelog(hw_type: str, channel: str) -> str:
    """Changelog text of the latest firmware for hw_type + channel.

    Raises NotFoundError when no latest firmware exists or when it has no
    changelog stored.
    """
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")

    matches = list(
        get_db()
        .collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("is_latest", "==", True)
        .limit(1)
        .stream()
    )
    if not matches:
        raise NotFoundError("Firmware")

    firmware = _doc_to_firmware_version(matches[0])
    if not firmware.changelog:
        raise NotFoundError("Changelog")
    return firmware.changelog
|
||||||
|
|
||||||
|
|
||||||
|
def get_version_changelog(hw_type: str, channel: str, version: str) -> str:
    """Changelog text of one specific firmware version.

    Raises NotFoundError when the version does not exist or has no changelog.
    """
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")

    matches = list(
        get_db()
        .collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("version", "==", version)
        .limit(1)
        .stream()
    )
    if not matches:
        raise NotFoundError("Firmware version")

    firmware = _doc_to_firmware_version(matches[0])
    if not firmware.changelog:
        raise NotFoundError("Changelog")
    return firmware.changelog
|
||||||
|
|
||||||
|
|
||||||
|
def get_firmware_path(hw_type: str, channel: str, version: str) -> Path:
    """Resolve the on-disk path of a stored firmware binary.

    hw_type and channel are validated against the known sets — consistent
    with the other lookup helpers, which the original omitted here — and the
    resolved path must stay inside the storage root. All three segments come
    straight from an unauthenticated URL, so this is defense-in-depth against
    path traversal via a crafted version segment.

    Raises HTTPException(400) on invalid input, NotFoundError when the
    binary does not exist.
    """
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")

    path = _storage_path(hw_type, channel, version)
    storage_root = Path(settings.firmware_storage_path).resolve()
    if storage_root not in path.resolve().parents:
        # e.g. version == ".." would escape the per-channel directory
        raise HTTPException(status_code=400, detail=f"Invalid version '{version}'")

    if not path.exists():
        raise NotFoundError("Firmware binary")
    return path
|
||||||
|
|
||||||
|
|
||||||
|
def record_ota_event(event_type: str, payload: dict[str, Any]) -> None:
    """Persist an OTA telemetry event (download or flash) to Firestore.

    Best-effort: any failure is logged and swallowed so device-facing
    endpoints never fail because telemetry storage is unavailable.
    """
    try:
        document = {
            "event_type": event_type,
            "received_at": datetime.now(timezone.utc),
        }
        document.update(payload)
        get_db().collection("ota_events").add(document)
    except Exception as exc:
        logger.warning("Failed to persist OTA event (%s): %s", event_type, exc)
|
||||||
|
|
||||||
|
|
||||||
|
def edit_firmware(
    doc_id: str,
    channel: str | None = None,
    version: str | None = None,
    update_type: UpdateType | None = None,
    min_fw_version: str | None = None,
    changelog: str | None = None,
    release_note: str | None = None,
    bespoke_uid: str | None = None,
    file_bytes: bytes | None = None,
) -> FirmwareVersion:
    """Patch metadata of an existing firmware and/or replace its binary.

    Only non-None arguments are applied; an empty string clears the optional
    text fields. When channel/version change, the stored binary is moved (or
    rewritten, if a replacement was uploaded) to the new storage path.

    Raises NotFoundError for an unknown doc_id, HTTPException(400) for an
    invalid channel.

    NOTE(review): is_latest flags are not adjusted when a firmware moves to
    a different channel — confirm that is intended.
    """
    db = get_db()
    doc_ref = db.collection(COLLECTION).document(doc_id)
    snapshot = doc_ref.get()
    if not snapshot.exists:
        raise NotFoundError("Firmware")

    current = snapshot.to_dict() or {}
    hw_type = current["hw_type"]
    prev_channel = current.get("channel", "")
    prev_version = current.get("version", "")

    next_channel = prev_channel if channel is None else channel
    next_version = prev_version if version is None else version

    if channel is not None and channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel. Must be one of: {', '.join(sorted(VALID_CHANNELS))}")

    updates: dict = {}
    if channel is not None:
        updates["channel"] = channel
    if version is not None:
        updates["version"] = version
    if update_type is not None:
        updates["update_type"] = update_type.value
    if min_fw_version is not None:
        updates["min_fw_version"] = min_fw_version or None
    if changelog is not None:
        updates["changelog"] = changelog or None
    if release_note is not None:
        updates["release_note"] = release_note or None
    if bespoke_uid is not None:
        updates["bespoke_uid"] = bespoke_uid or None

    old_path = _storage_path(hw_type, prev_channel, prev_version)
    new_path = _storage_path(hw_type, next_channel, next_version)

    if file_bytes is not None:
        # Replacement binary uploaded: drop the old file if the storage path
        # changed, then write the new bytes and refresh the digest/size.
        if old_path != new_path and old_path.exists():
            old_path.unlink()
            try:
                old_path.parent.rmdir()  # remove version dir if now empty
            except OSError:
                pass
        new_path.parent.mkdir(parents=True, exist_ok=True)
        new_path.write_bytes(file_bytes)
        updates["sha256"] = hashlib.sha256(file_bytes).hexdigest()
        updates["size_bytes"] = len(file_bytes)
    elif (channel is not None and channel != prev_channel) or (version is not None and version != prev_version):
        # Path changed but no new file — move the existing binary.
        if old_path.exists() and old_path != new_path:
            new_path.parent.mkdir(parents=True, exist_ok=True)
            old_path.rename(new_path)
            try:
                old_path.parent.rmdir()
            except OSError:
                pass

    if updates:
        doc_ref.update(updates)

    return _doc_to_firmware_version(doc_ref.get())
|
||||||
|
|
||||||
|
|
||||||
|
def _uploaded_at_sort_key(doc) -> datetime:
    """Comparable upload timestamp for a firmware document.

    Firestore stores uploaded_at as a datetime, but docs missing the field
    (or holding a non-datetime value) must still sort: they get an aware
    datetime.min so the key type is uniform. The previous
    `get("uploaded_at", "")` default raised TypeError when a datetime was
    compared against the empty string.
    """
    value = (doc.to_dict() or {}).get("uploaded_at")
    if isinstance(value, datetime):
        # Normalise naive values so aware/naive never mix in one sort.
        return value if value.tzinfo else value.replace(tzinfo=timezone.utc)
    return datetime.min.replace(tzinfo=timezone.utc)


def delete_firmware(doc_id: str) -> None:
    """Delete a firmware version: its Firestore document and on-disk binary.

    If the deleted firmware was flagged is_latest, the most recently
    uploaded remaining firmware for the same hw_type + channel is promoted.
    Raises NotFoundError for an unknown doc_id.
    """
    db = get_db()
    doc_ref = db.collection(COLLECTION).document(doc_id)
    doc = doc_ref.get()
    if not doc.exists:
        raise NotFoundError("Firmware")

    data = doc.to_dict() or {}
    hw_type = data.get("hw_type", "")
    channel = data.get("channel", "")
    version = data.get("version", "")
    was_latest = data.get("is_latest", False)

    # Delete the binary file and, if possible, its now-empty version directory.
    path = _storage_path(hw_type, channel, version)
    if path.exists():
        path.unlink()
        try:
            path.parent.rmdir()
        except OSError:
            pass

    doc_ref.delete()

    # If we deleted the latest, promote the next most recent as latest.
    if was_latest:
        remaining = list(
            db.collection(COLLECTION)
            .where("hw_type", "==", hw_type)
            .where("channel", "==", channel)
            .stream()
        )
        if remaining:
            # Sort in Python to avoid needing a composite Firestore index.
            remaining.sort(key=_uploaded_at_sort_key, reverse=True)
            remaining[0].reference.update({"is_latest": True})
|
||||||
@@ -9,12 +9,26 @@ from devices.router import router as devices_router
|
|||||||
from settings.router import router as settings_router
|
from settings.router import router as settings_router
|
||||||
from users.router import router as users_router
|
from users.router import router as users_router
|
||||||
from mqtt.router import router as mqtt_router
|
from mqtt.router import router as mqtt_router
|
||||||
|
from mqtt.auth import router as mqtt_auth_router
|
||||||
from equipment.router import router as equipment_router
|
from equipment.router import router as equipment_router
|
||||||
from staff.router import router as staff_router
|
from staff.router import router as staff_router
|
||||||
from helpdesk.router import router as helpdesk_router
|
from helpdesk.router import router as helpdesk_router
|
||||||
from builder.router import router as builder_router
|
from builder.router import router as builder_router
|
||||||
|
from manufacturing.router import router as manufacturing_router
|
||||||
|
from firmware.router import router as firmware_router, ota_router
|
||||||
|
from admin.router import router as admin_router
|
||||||
|
from crm.router import router as crm_products_router
|
||||||
|
from crm.customers_router import router as crm_customers_router
|
||||||
|
from crm.orders_router import router as crm_orders_router, global_router as crm_orders_global_router
|
||||||
|
from crm.comms_router import router as crm_comms_router
|
||||||
|
from crm.media_router import router as crm_media_router
|
||||||
|
from crm.nextcloud_router import router as crm_nextcloud_router
|
||||||
|
from crm.quotations_router import router as crm_quotations_router
|
||||||
|
from public.router import router as public_router
|
||||||
|
from crm.nextcloud import close_client as close_nextcloud_client, keepalive_ping as nextcloud_keepalive
|
||||||
|
from crm.mail_accounts import get_mail_accounts
|
||||||
from mqtt.client import mqtt_manager
|
from mqtt.client import mqtt_manager
|
||||||
from mqtt import database as mqtt_db
|
import database as db
|
||||||
from melodies import service as melody_service
|
from melodies import service as melody_service
|
||||||
|
|
||||||
app = FastAPI(
|
app = FastAPI(
|
||||||
@@ -38,25 +52,75 @@ app.include_router(devices_router)
|
|||||||
app.include_router(settings_router)
|
app.include_router(settings_router)
|
||||||
app.include_router(users_router)
|
app.include_router(users_router)
|
||||||
app.include_router(mqtt_router)
|
app.include_router(mqtt_router)
|
||||||
|
app.include_router(mqtt_auth_router)
|
||||||
app.include_router(equipment_router)
|
app.include_router(equipment_router)
|
||||||
app.include_router(helpdesk_router)
|
app.include_router(helpdesk_router)
|
||||||
app.include_router(staff_router)
|
app.include_router(staff_router)
|
||||||
app.include_router(builder_router)
|
app.include_router(builder_router)
|
||||||
|
app.include_router(manufacturing_router)
|
||||||
|
app.include_router(firmware_router)
|
||||||
|
app.include_router(ota_router)
|
||||||
|
app.include_router(admin_router)
|
||||||
|
app.include_router(crm_products_router)
|
||||||
|
app.include_router(crm_customers_router)
|
||||||
|
app.include_router(crm_orders_router)
|
||||||
|
app.include_router(crm_orders_global_router)
|
||||||
|
app.include_router(crm_comms_router)
|
||||||
|
app.include_router(crm_media_router)
|
||||||
|
app.include_router(crm_nextcloud_router)
|
||||||
|
app.include_router(crm_quotations_router)
|
||||||
|
app.include_router(public_router)
|
||||||
|
|
||||||
|
|
||||||
|
async def nextcloud_keepalive_loop():
|
||||||
|
await nextcloud_keepalive() # eager warmup on startup
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(45)
|
||||||
|
await nextcloud_keepalive()
|
||||||
|
|
||||||
|
|
||||||
|
async def email_sync_loop():
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(settings.email_sync_interval_minutes * 60)
|
||||||
|
try:
|
||||||
|
from crm.email_sync import sync_emails
|
||||||
|
await sync_emails()
|
||||||
|
except Exception as e:
|
||||||
|
print(f"[EMAIL SYNC] Error: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
async def crm_poll_loop():
|
||||||
|
while True:
|
||||||
|
await asyncio.sleep(24 * 60 * 60) # once per day
|
||||||
|
try:
|
||||||
|
from crm.service import poll_crm_customer_statuses
|
||||||
|
poll_crm_customer_statuses()
|
||||||
|
except Exception as e:
|
||||||
|
print(f"[CRM POLL] Error: {e}")
|
||||||
|
|
||||||
|
|
||||||
@app.on_event("startup")
|
@app.on_event("startup")
|
||||||
async def startup():
|
async def startup():
|
||||||
init_firebase()
|
init_firebase()
|
||||||
await mqtt_db.init_db()
|
await db.init_db()
|
||||||
await melody_service.migrate_from_firestore()
|
await melody_service.migrate_from_firestore()
|
||||||
mqtt_manager.start(asyncio.get_event_loop())
|
mqtt_manager.start(asyncio.get_event_loop())
|
||||||
asyncio.create_task(mqtt_db.purge_loop())
|
asyncio.create_task(db.purge_loop())
|
||||||
|
asyncio.create_task(nextcloud_keepalive_loop())
|
||||||
|
asyncio.create_task(crm_poll_loop())
|
||||||
|
sync_accounts = [a for a in get_mail_accounts() if a.get("sync_inbound") and a.get("imap_host")]
|
||||||
|
if sync_accounts:
|
||||||
|
print(f"[EMAIL SYNC] IMAP configured for {len(sync_accounts)} account(s) - starting sync loop")
|
||||||
|
asyncio.create_task(email_sync_loop())
|
||||||
|
else:
|
||||||
|
print("[EMAIL SYNC] IMAP not configured - sync loop disabled")
|
||||||
|
|
||||||
|
|
||||||
@app.on_event("shutdown")
|
@app.on_event("shutdown")
|
||||||
async def shutdown():
|
async def shutdown():
|
||||||
mqtt_manager.stop()
|
mqtt_manager.stop()
|
||||||
await mqtt_db.close_db()
|
await db.close_db()
|
||||||
|
await close_nextcloud_client()
|
||||||
|
|
||||||
|
|
||||||
@app.get("/api/health")
|
@app.get("/api/health")
|
||||||
@@ -66,3 +130,4 @@ async def health_check():
|
|||||||
"firebase": firebase_initialized,
|
"firebase": firebase_initialized,
|
||||||
"mqtt": mqtt_manager.connected,
|
"mqtt": mqtt_manager.connected,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
0
backend/manufacturing/__init__.py
Normal file
41
backend/manufacturing/audit.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from database import get_db
|
||||||
|
|
||||||
|
logger = logging.getLogger("manufacturing.audit")
|
||||||
|
|
||||||
|
|
||||||
|
async def log_action(
    admin_user: str,
    action: str,
    serial_number: str | None = None,
    detail: dict | None = None,
):
    """Write a manufacturing audit entry to SQLite.

    Best-effort: storage failures are logged and swallowed so auditing
    never breaks the calling operation.

    action examples: batch_created, device_flashed, device_assigned, status_updated

    NOTE(review): an empty dict `detail={}` is stored as NULL because of the
    truthiness check — confirm that is intended.
    """
    try:
        db = await get_db()
        await db.execute(
            """INSERT INTO mfg_audit_log (admin_user, action, serial_number, detail)
               VALUES (?, ?, ?, ?)""",
            (
                admin_user,
                action,
                serial_number,
                json.dumps(detail) if detail else None,
            ),
        )
        await db.commit()
    except Exception as e:
        # Lazy %s formatting instead of an eager f-string: the message is
        # only rendered if the record is actually emitted.
        logger.error("Failed to write audit log: %s", e)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_recent(limit: int = 20) -> list[dict]:
    """Return the most recent audit entries, newest first."""
    db = await get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM mfg_audit_log ORDER BY timestamp DESC LIMIT ?",
        (limit,),
    )
    return [dict(row) for row in rows]
|
||||||
132
backend/manufacturing/models.py
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from typing import Optional, List
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class BoardType(str, Enum):
    """Hardware board models that can be manufactured."""

    vesper = "vesper"
    vesper_plus = "vesper_plus"
    vesper_pro = "vesper_pro"
    chronos = "chronos"
    chronos_pro = "chronos_pro"
    agnus_mini = "agnus_mini"
    agnus = "agnus"
|
||||||
|
|
||||||
|
|
||||||
|
# Human-readable display names, keyed by board type value.
BOARD_TYPE_LABELS = {
    "vesper": "Vesper",
    "vesper_plus": "Vesper Plus",
    "vesper_pro": "Vesper Pro",
    "chronos": "Chronos",
    "chronos_pro": "Chronos Pro",
    "agnus_mini": "Agnus Mini",
    "agnus": "Agnus",
}

# Family codes (BS + 4 chars = segment 1 of serial number).
# All variants of a family share one code.
BOARD_FAMILY_CODES = {
    "vesper": "VSPR",
    "vesper_plus": "VSPR",
    "vesper_pro": "VSPR",
    "agnus": "AGNS",
    "agnus_mini": "AGNS",
    "chronos": "CRNS",
    "chronos_pro": "CRNS",
}

# Variant codes (3 chars = first part of segment 3 of serial number).
BOARD_VARIANT_CODES = {
    "vesper": "STD",
    "vesper_plus": "PLS",
    "vesper_pro": "PRO",
    "agnus": "STD",
    "agnus_mini": "MIN",
    "chronos": "STD",
    "chronos_pro": "PRO",
}
|
||||||
|
|
||||||
|
|
||||||
|
class MfgStatus(str, Enum):
|
||||||
|
manufactured = "manufactured"
|
||||||
|
flashed = "flashed"
|
||||||
|
provisioned = "provisioned"
|
||||||
|
sold = "sold"
|
||||||
|
claimed = "claimed"
|
||||||
|
decommissioned = "decommissioned"
|
||||||
|
|
||||||
|
|
||||||
|
class LifecycleEntry(BaseModel):
|
||||||
|
status_id: str
|
||||||
|
date: str # ISO 8601 UTC string
|
||||||
|
note: Optional[str] = None
|
||||||
|
set_by: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class BatchCreate(BaseModel):
|
||||||
|
board_type: BoardType
|
||||||
|
board_version: str = Field(
|
||||||
|
...,
|
||||||
|
pattern=r"^\d+(\.\d+)*$",
|
||||||
|
description="SemVer-style version string, e.g. '1.0' or legacy '01'",
|
||||||
|
)
|
||||||
|
quantity: int = Field(..., ge=1, le=100)
|
||||||
|
|
||||||
|
|
||||||
|
class BatchResponse(BaseModel):
|
||||||
|
batch_id: str
|
||||||
|
serial_numbers: List[str]
|
||||||
|
board_type: str
|
||||||
|
board_version: str
|
||||||
|
created_at: str
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceInventoryItem(BaseModel):
|
||||||
|
id: str
|
||||||
|
serial_number: str
|
||||||
|
hw_type: str
|
||||||
|
hw_version: str
|
||||||
|
mfg_status: str
|
||||||
|
mfg_batch_id: Optional[str] = None
|
||||||
|
created_at: Optional[str] = None
|
||||||
|
owner: Optional[str] = None
|
||||||
|
assigned_to: Optional[str] = None
|
||||||
|
device_name: Optional[str] = None
|
||||||
|
lifecycle_history: Optional[List["LifecycleEntry"]] = None
|
||||||
|
customer_id: Optional[str] = None
|
||||||
|
user_list: Optional[List[str]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceInventoryListResponse(BaseModel):
|
||||||
|
devices: List[DeviceInventoryItem]
|
||||||
|
total: int
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceStatusUpdate(BaseModel):
|
||||||
|
status: MfgStatus
|
||||||
|
note: Optional[str] = None
|
||||||
|
force_claimed: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceAssign(BaseModel):
|
||||||
|
customer_id: str
|
||||||
|
|
||||||
|
|
||||||
|
class CustomerSearchResult(BaseModel):
|
||||||
|
id: str
|
||||||
|
name: str = ""
|
||||||
|
email: str = ""
|
||||||
|
organization: str = ""
|
||||||
|
phone: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class RecentActivityItem(BaseModel):
|
||||||
|
serial_number: str
|
||||||
|
hw_type: str
|
||||||
|
mfg_status: str
|
||||||
|
owner: Optional[str] = None
|
||||||
|
updated_at: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ManufacturingStats(BaseModel):
|
||||||
|
counts: dict
|
||||||
|
recent_activity: List[RecentActivityItem]
|
||||||
561
backend/manufacturing/router.py
Normal file
@@ -0,0 +1,561 @@
|
|||||||
|
from fastapi import APIRouter, Depends, Query, HTTPException, UploadFile, File
|
||||||
|
from fastapi.responses import Response
|
||||||
|
from fastapi.responses import RedirectResponse
|
||||||
|
from typing import Optional
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from auth.models import TokenPayload
|
||||||
|
from auth.dependencies import require_permission
|
||||||
|
from manufacturing.models import (
|
||||||
|
BatchCreate, BatchResponse,
|
||||||
|
DeviceInventoryItem, DeviceInventoryListResponse,
|
||||||
|
DeviceStatusUpdate, DeviceAssign,
|
||||||
|
ManufacturingStats,
|
||||||
|
)
|
||||||
|
from manufacturing import service
|
||||||
|
from manufacturing import audit
|
||||||
|
from shared.exceptions import NotFoundError
|
||||||
|
from shared.firebase import get_db as get_firestore
|
||||||
|
|
||||||
|
|
||||||
|
class LifecycleEntryPatch(BaseModel):
|
||||||
|
index: int
|
||||||
|
date: Optional[str] = None
|
||||||
|
note: Optional[str] = None
|
||||||
|
|
||||||
|
class LifecycleEntryCreate(BaseModel):
|
||||||
|
status_id: str
|
||||||
|
date: Optional[str] = None
|
||||||
|
note: Optional[str] = None
|
||||||
|
|
||||||
|
VALID_FLASH_ASSETS = {"bootloader.bin", "partitions.bin"}
|
||||||
|
VALID_HW_TYPES_MFG = {"vesper", "vesper_plus", "vesper_pro", "agnus", "agnus_mini", "chronos", "chronos_pro"}
|
||||||
|
# Bespoke UIDs are dynamic — we allow any non-empty slug that doesn't clash with
|
||||||
|
# a standard hw_type name. The flash-asset upload endpoint checks this below.
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/manufacturing", tags=["manufacturing"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/stats", response_model=ManufacturingStats)
|
||||||
|
def get_stats(
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
return service.get_stats()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/audit-log")
|
||||||
|
async def get_audit_log(
|
||||||
|
limit: int = Query(20, ge=1, le=100),
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
entries = await audit.get_recent(limit=limit)
|
||||||
|
return {"entries": entries}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/batch", response_model=BatchResponse, status_code=201)
|
||||||
|
async def create_batch(
|
||||||
|
body: BatchCreate,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "add")),
|
||||||
|
):
|
||||||
|
result = service.create_batch(body)
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="batch_created",
|
||||||
|
detail={
|
||||||
|
"batch_id": result.batch_id,
|
||||||
|
"board_type": result.board_type,
|
||||||
|
"board_version": result.board_version,
|
||||||
|
"quantity": len(result.serial_numbers),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/devices", response_model=DeviceInventoryListResponse)
|
||||||
|
def list_devices(
|
||||||
|
status: Optional[str] = Query(None),
|
||||||
|
hw_type: Optional[str] = Query(None),
|
||||||
|
search: Optional[str] = Query(None),
|
||||||
|
limit: int = Query(100, ge=1, le=500),
|
||||||
|
offset: int = Query(0, ge=0),
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
items = service.list_devices(
|
||||||
|
status=status,
|
||||||
|
hw_type=hw_type,
|
||||||
|
search=search,
|
||||||
|
limit=limit,
|
||||||
|
offset=offset,
|
||||||
|
)
|
||||||
|
return DeviceInventoryListResponse(devices=items, total=len(items))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/devices/{sn}", response_model=DeviceInventoryItem)
|
||||||
|
def get_device(
|
||||||
|
sn: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
return service.get_device_by_sn(sn)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/customers/search")
|
||||||
|
def search_customers(
|
||||||
|
q: str = Query(""),
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
"""Search CRM customers by name, email, phone, organization, or tags."""
|
||||||
|
results = service.search_customers(q)
|
||||||
|
return {"results": results}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/customers/{customer_id}")
|
||||||
|
def get_customer(
|
||||||
|
customer_id: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
"""Get a single CRM customer by ID."""
|
||||||
|
db = get_firestore()
|
||||||
|
doc = db.collection("crm_customers").document(customer_id).get()
|
||||||
|
if not doc.exists:
|
||||||
|
raise HTTPException(status_code=404, detail="Customer not found")
|
||||||
|
data = doc.to_dict() or {}
|
||||||
|
loc = data.get("location") or {}
|
||||||
|
city = loc.get("city") if isinstance(loc, dict) else None
|
||||||
|
return {
|
||||||
|
"id": doc.id,
|
||||||
|
"name": data.get("name") or "",
|
||||||
|
"surname": data.get("surname") or "",
|
||||||
|
"email": data.get("email") or "",
|
||||||
|
"organization": data.get("organization") or "",
|
||||||
|
"phone": data.get("phone") or "",
|
||||||
|
"city": city or "",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/devices/{sn}/status", response_model=DeviceInventoryItem)
|
||||||
|
async def update_status(
|
||||||
|
sn: str,
|
||||||
|
body: DeviceStatusUpdate,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
# Guard: claimed requires at least one user in user_list
|
||||||
|
# (allow if explicitly force_claimed=true, which the mfg UI sets after adding a user manually)
|
||||||
|
if body.status.value == "claimed":
|
||||||
|
db = get_firestore()
|
||||||
|
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||||
|
if docs:
|
||||||
|
data = docs[0].to_dict() or {}
|
||||||
|
user_list = data.get("user_list", []) or []
|
||||||
|
if not user_list and not getattr(body, "force_claimed", False):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Cannot set status to 'claimed': device has no users in user_list. "
|
||||||
|
"Assign a user first, then set to Claimed.",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Guard: sold requires a customer assigned
|
||||||
|
if body.status.value == "sold":
|
||||||
|
db = get_firestore()
|
||||||
|
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||||
|
if docs:
|
||||||
|
data = docs[0].to_dict() or {}
|
||||||
|
if not data.get("customer_id"):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Cannot set status to 'sold' without an assigned customer. "
|
||||||
|
"Use the 'Assign to Customer' action first.",
|
||||||
|
)
|
||||||
|
|
||||||
|
result = service.update_device_status(sn, body, set_by=user.email)
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="status_updated",
|
||||||
|
serial_number=sn,
|
||||||
|
detail={"status": body.status.value, "note": body.note},
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/devices/{sn}/lifecycle", response_model=DeviceInventoryItem)
|
||||||
|
async def patch_lifecycle_entry(
|
||||||
|
sn: str,
|
||||||
|
body: LifecycleEntryPatch,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
"""Edit the date and/or note of a lifecycle history entry by index."""
|
||||||
|
db = get_firestore()
|
||||||
|
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||||
|
if not docs:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
doc_ref = docs[0].reference
|
||||||
|
data = docs[0].to_dict() or {}
|
||||||
|
history = data.get("lifecycle_history") or []
|
||||||
|
if body.index < 0 or body.index >= len(history):
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid lifecycle entry index")
|
||||||
|
if body.date is not None:
|
||||||
|
history[body.index]["date"] = body.date
|
||||||
|
if body.note is not None:
|
||||||
|
history[body.index]["note"] = body.note
|
||||||
|
doc_ref.update({"lifecycle_history": history})
|
||||||
|
from manufacturing.service import _doc_to_inventory_item
|
||||||
|
return _doc_to_inventory_item(doc_ref.get())
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/devices/{sn}/lifecycle", response_model=DeviceInventoryItem, status_code=200)
|
||||||
|
async def create_lifecycle_entry(
|
||||||
|
sn: str,
|
||||||
|
body: LifecycleEntryCreate,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
"""Upsert a lifecycle history entry for the given status_id.
|
||||||
|
|
||||||
|
If an entry for this status already exists it is overwritten in-place;
|
||||||
|
otherwise a new entry is appended. This prevents duplicate entries when
|
||||||
|
a status is visited more than once (max one entry per status).
|
||||||
|
"""
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
db = get_firestore()
|
||||||
|
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||||
|
if not docs:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
doc_ref = docs[0].reference
|
||||||
|
data = docs[0].to_dict() or {}
|
||||||
|
history = list(data.get("lifecycle_history") or [])
|
||||||
|
|
||||||
|
new_entry = {
|
||||||
|
"status_id": body.status_id,
|
||||||
|
"date": body.date or datetime.now(timezone.utc).isoformat(),
|
||||||
|
"note": body.note,
|
||||||
|
"set_by": user.email,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Overwrite existing entry for this status if present, else append
|
||||||
|
existing_idx = next(
|
||||||
|
(i for i, e in enumerate(history) if e.get("status_id") == body.status_id),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
if existing_idx is not None:
|
||||||
|
history[existing_idx] = new_entry
|
||||||
|
else:
|
||||||
|
history.append(new_entry)
|
||||||
|
|
||||||
|
doc_ref.update({"lifecycle_history": history})
|
||||||
|
from manufacturing.service import _doc_to_inventory_item
|
||||||
|
return _doc_to_inventory_item(doc_ref.get())
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/devices/{sn}/lifecycle/{index}", response_model=DeviceInventoryItem)
|
||||||
|
async def delete_lifecycle_entry(
|
||||||
|
sn: str,
|
||||||
|
index: int,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
"""Delete a lifecycle history entry by index. Cannot delete the entry for the current status."""
|
||||||
|
db = get_firestore()
|
||||||
|
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||||
|
if not docs:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
doc_ref = docs[0].reference
|
||||||
|
data = docs[0].to_dict() or {}
|
||||||
|
history = data.get("lifecycle_history") or []
|
||||||
|
if index < 0 or index >= len(history):
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid lifecycle entry index")
|
||||||
|
current_status = data.get("mfg_status", "")
|
||||||
|
if history[index].get("status_id") == current_status:
|
||||||
|
raise HTTPException(status_code=400, detail="Cannot delete the entry for the current status. Change the status first.")
|
||||||
|
history.pop(index)
|
||||||
|
doc_ref.update({"lifecycle_history": history})
|
||||||
|
from manufacturing.service import _doc_to_inventory_item
|
||||||
|
return _doc_to_inventory_item(doc_ref.get())
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/devices/{sn}/nvs.bin")
|
||||||
|
async def download_nvs(
|
||||||
|
sn: str,
|
||||||
|
hw_type_override: Optional[str] = Query(None, description="Override hw_type written to NVS (for bespoke firmware)"),
|
||||||
|
hw_revision_override: Optional[str] = Query(None, description="Override hw_revision written to NVS (for bespoke firmware)"),
|
||||||
|
nvs_schema: Optional[str] = Query(None, description="NVS schema to use: 'legacy' or 'new' (default)"),
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
binary = service.get_nvs_binary(sn, hw_type_override=hw_type_override, hw_revision_override=hw_revision_override, legacy=(nvs_schema == "legacy"))
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="device_flashed",
|
||||||
|
serial_number=sn,
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
content=binary,
|
||||||
|
media_type="application/octet-stream",
|
||||||
|
headers={"Content-Disposition": f'attachment; filename="{sn}_nvs.bin"'},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/devices/{sn}/assign", response_model=DeviceInventoryItem)
|
||||||
|
async def assign_device(
|
||||||
|
sn: str,
|
||||||
|
body: DeviceAssign,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
result = service.assign_device(sn, body)
|
||||||
|
except NotFoundError as e:
|
||||||
|
raise HTTPException(status_code=404, detail=str(e))
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="device_assigned",
|
||||||
|
serial_number=sn,
|
||||||
|
detail={"customer_id": body.customer_id},
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/devices/{sn}", status_code=204)
|
||||||
|
async def delete_device(
|
||||||
|
sn: str,
|
||||||
|
force: bool = Query(False, description="Required to delete sold/claimed devices"),
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "delete")),
|
||||||
|
):
|
||||||
|
"""Delete a device. Sold/claimed devices require force=true."""
|
||||||
|
try:
|
||||||
|
service.delete_device(sn, force=force)
|
||||||
|
except NotFoundError:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
except PermissionError as e:
|
||||||
|
raise HTTPException(status_code=403, detail=str(e))
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="device_deleted",
|
||||||
|
serial_number=sn,
|
||||||
|
detail={"force": force},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/devices/{sn}/email/manufactured", status_code=204)
|
||||||
|
async def send_manufactured_email(
|
||||||
|
sn: str,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
"""Send the 'device manufactured' notification to the assigned customer's email."""
|
||||||
|
db = get_firestore()
|
||||||
|
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||||
|
if not docs:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
data = docs[0].to_dict() or {}
|
||||||
|
customer_id = data.get("customer_id")
|
||||||
|
if not customer_id:
|
||||||
|
raise HTTPException(status_code=400, detail="No customer assigned to this device")
|
||||||
|
customer_doc = db.collection("crm_customers").document(customer_id).get()
|
||||||
|
if not customer_doc.exists:
|
||||||
|
raise HTTPException(status_code=404, detail="Assigned customer not found")
|
||||||
|
cdata = customer_doc.to_dict() or {}
|
||||||
|
email = cdata.get("email")
|
||||||
|
if not email:
|
||||||
|
raise HTTPException(status_code=400, detail="Customer has no email address")
|
||||||
|
name_parts = [cdata.get("name") or "", cdata.get("surname") or ""]
|
||||||
|
customer_name = " ".join(p for p in name_parts if p).strip() or None
|
||||||
|
hw_family = data.get("hw_family") or data.get("hw_type") or ""
|
||||||
|
from utils.emails.device_mfged_mail import send_device_manufactured_email
|
||||||
|
send_device_manufactured_email(
|
||||||
|
customer_email=email,
|
||||||
|
serial_number=sn,
|
||||||
|
device_name=hw_family.replace("_", " ").title(),
|
||||||
|
customer_name=customer_name,
|
||||||
|
)
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="email_manufactured_sent",
|
||||||
|
serial_number=sn,
|
||||||
|
detail={"recipient": email},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/devices/{sn}/email/assigned", status_code=204)
|
||||||
|
async def send_assigned_email(
|
||||||
|
sn: str,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
"""Send the 'device assigned / app instructions' email to the assigned user(s)."""
|
||||||
|
db = get_firestore()
|
||||||
|
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||||
|
if not docs:
|
||||||
|
raise HTTPException(status_code=404, detail="Device not found")
|
||||||
|
data = docs[0].to_dict() or {}
|
||||||
|
user_list = data.get("user_list") or []
|
||||||
|
if not user_list:
|
||||||
|
raise HTTPException(status_code=400, detail="No users assigned to this device")
|
||||||
|
hw_family = data.get("hw_family") or data.get("hw_type") or ""
|
||||||
|
device_name = hw_family.replace("_", " ").title()
|
||||||
|
from utils.emails.device_assigned_mail import send_device_assigned_email
|
||||||
|
errors = []
|
||||||
|
for uid in user_list:
|
||||||
|
try:
|
||||||
|
user_doc = db.collection("users").document(uid).get()
|
||||||
|
if not user_doc.exists:
|
||||||
|
continue
|
||||||
|
udata = user_doc.to_dict() or {}
|
||||||
|
email = udata.get("email")
|
||||||
|
if not email:
|
||||||
|
continue
|
||||||
|
display_name = udata.get("display_name") or udata.get("name") or None
|
||||||
|
send_device_assigned_email(
|
||||||
|
user_email=email,
|
||||||
|
serial_number=sn,
|
||||||
|
device_name=device_name,
|
||||||
|
user_name=display_name,
|
||||||
|
)
|
||||||
|
except Exception as exc:
|
||||||
|
errors.append(str(exc))
|
||||||
|
if errors:
|
||||||
|
raise HTTPException(status_code=500, detail=f"Some emails failed: {'; '.join(errors)}")
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="email_assigned_sent",
|
||||||
|
serial_number=sn,
|
||||||
|
detail={"user_count": len(user_list)},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/devices", status_code=200)
|
||||||
|
async def delete_unprovisioned(
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "delete")),
|
||||||
|
):
|
||||||
|
"""Delete all devices with status 'manufactured' (never provisioned)."""
|
||||||
|
deleted = service.delete_unprovisioned_devices()
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="bulk_delete_unprovisioned",
|
||||||
|
detail={"count": len(deleted), "serial_numbers": deleted},
|
||||||
|
)
|
||||||
|
return {"deleted": deleted, "count": len(deleted)}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/devices/{sn}/firmware.bin")
|
||||||
|
def redirect_firmware(
|
||||||
|
sn: str,
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
"""Redirect to the latest stable firmware binary for this device's hw_type.
|
||||||
|
Resolves to GET /api/firmware/{hw_type}/stable/{version}/firmware.bin.
|
||||||
|
"""
|
||||||
|
url = service.get_firmware_url(sn)
|
||||||
|
return RedirectResponse(url=url, status_code=302)
|
||||||
|
|
||||||
|
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
# Flash assets — bootloader.bin and partitions.bin per hw_type
|
||||||
|
# These are the binaries that must be flashed at fixed addresses during full
|
||||||
|
# provisioning (0x1000 bootloader, 0x8000 partition table).
|
||||||
|
# They are NOT flashed during OTA updates — only during initial provisioning.
|
||||||
|
# Upload once per hw_type after each PlatformIO build that changes the layout.
|
||||||
|
# ─────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.get("/flash-assets")
|
||||||
|
def list_flash_assets(
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
"""Return asset status for all known board types (and any discovered bespoke UIDs).
|
||||||
|
|
||||||
|
Checks the filesystem directly — no database involved.
|
||||||
|
Each entry contains: hw_type, bootloader (exists, size, uploaded_at), partitions (same), note.
|
||||||
|
"""
|
||||||
|
return {"assets": service.list_flash_assets()}
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/flash-assets/{hw_type}/{asset}", status_code=204)
|
||||||
|
async def delete_flash_asset(
|
||||||
|
hw_type: str,
|
||||||
|
asset: str,
|
||||||
|
user: TokenPayload = Depends(require_permission("manufacturing", "delete")),
|
||||||
|
):
|
||||||
|
"""Delete a single flash asset file (bootloader.bin or partitions.bin)."""
|
||||||
|
if asset not in VALID_FLASH_ASSETS:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Invalid asset. Must be one of: {', '.join(sorted(VALID_FLASH_ASSETS))}")
|
||||||
|
try:
|
||||||
|
service.delete_flash_asset(hw_type, asset)
|
||||||
|
except NotFoundError as e:
|
||||||
|
raise HTTPException(status_code=404, detail=str(e))
|
||||||
|
await audit.log_action(
|
||||||
|
admin_user=user.email,
|
||||||
|
action="flash_asset_deleted",
|
||||||
|
detail={"hw_type": hw_type, "asset": asset},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class FlashAssetNoteBody(BaseModel):
|
||||||
|
note: str
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/flash-assets/{hw_type}/note", status_code=204)
|
||||||
|
async def set_flash_asset_note(
|
||||||
|
hw_type: str,
|
||||||
|
body: FlashAssetNoteBody,
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||||
|
):
|
||||||
|
"""Save (or overwrite) the note for a hw_type's flash asset set.
|
||||||
|
|
||||||
|
The note is stored as note.txt next to the binary files.
|
||||||
|
Pass an empty string to clear the note.
|
||||||
|
"""
|
||||||
|
service.set_flash_asset_note(hw_type, body.note)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/flash-assets/{hw_type}/{asset}", status_code=204)
|
||||||
|
async def upload_flash_asset(
|
||||||
|
hw_type: str,
|
||||||
|
asset: str,
|
||||||
|
file: UploadFile = File(...),
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "add")),
|
||||||
|
):
|
||||||
|
"""Upload a bootloader.bin or partitions.bin for a given hw_type.
|
||||||
|
|
||||||
|
These are build artifacts from PlatformIO (.pio/build/{env}/bootloader.bin
|
||||||
|
and .pio/build/{env}/partitions.bin). Upload them once per hw_type after
|
||||||
|
each PlatformIO build that changes the partition layout.
|
||||||
|
"""
|
||||||
|
# hw_type can be a standard board type OR a bespoke UID (any non-empty slug)
|
||||||
|
if not hw_type or len(hw_type) > 128:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid hw_type/bespoke UID.")
|
||||||
|
if asset not in VALID_FLASH_ASSETS:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Invalid asset. Must be one of: {', '.join(sorted(VALID_FLASH_ASSETS))}")
|
||||||
|
data = await file.read()
|
||||||
|
service.save_flash_asset(hw_type, asset, data)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/devices/{sn}/bootloader.bin")
|
||||||
|
def download_bootloader(
|
||||||
|
sn: str,
|
||||||
|
hw_type_override: Optional[str] = Query(None, description="Override hw_type for flash asset lookup (for bespoke firmware)"),
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
"""Return the bootloader.bin for this device's hw_type (flashed at 0x1000)."""
|
||||||
|
item = service.get_device_by_sn(sn)
|
||||||
|
hw_type = hw_type_override or item.hw_type
|
||||||
|
try:
|
||||||
|
data = service.get_flash_asset(hw_type, "bootloader.bin")
|
||||||
|
except NotFoundError as e:
|
||||||
|
raise HTTPException(status_code=404, detail=str(e))
|
||||||
|
return Response(
|
||||||
|
content=data,
|
||||||
|
media_type="application/octet-stream",
|
||||||
|
headers={"Content-Disposition": f'attachment; filename="bootloader_{hw_type}.bin"'},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/devices/{sn}/partitions.bin")
|
||||||
|
def download_partitions(
|
||||||
|
sn: str,
|
||||||
|
hw_type_override: Optional[str] = Query(None, description="Override hw_type for flash asset lookup (for bespoke firmware)"),
|
||||||
|
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||||
|
):
|
||||||
|
"""Return the partitions.bin for this device's hw_type (flashed at 0x8000)."""
|
||||||
|
item = service.get_device_by_sn(sn)
|
||||||
|
hw_type = hw_type_override or item.hw_type
|
||||||
|
try:
|
||||||
|
data = service.get_flash_asset(hw_type, "partitions.bin")
|
||||||
|
except NotFoundError as e:
|
||||||
|
raise HTTPException(status_code=404, detail=str(e))
|
||||||
|
return Response(
|
||||||
|
content=data,
|
||||||
|
media_type="application/octet-stream",
|
||||||
|
headers={"Content-Disposition": f'attachment; filename="partitions_{hw_type}.bin"'},
|
||||||
|
)
|
||||||
496
backend/manufacturing/service.py
Normal file
@@ -0,0 +1,496 @@
|
|||||||
|
import logging
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
from config import settings
|
||||||
|
from shared.firebase import get_db
|
||||||
|
from shared.exceptions import NotFoundError
|
||||||
|
from utils.serial_number import generate_serial
|
||||||
|
from utils.nvs_generator import generate as generate_nvs_binary
|
||||||
|
from manufacturing.models import BatchCreate, BatchResponse, DeviceInventoryItem, DeviceStatusUpdate, DeviceAssign, ManufacturingStats, RecentActivityItem, BOARD_TYPE_LABELS
|
||||||
|
|
||||||
|
COLLECTION = "devices"
|
||||||
|
_BATCH_ID_CHARS = string.ascii_uppercase + string.digits
|
||||||
|
|
||||||
|
|
||||||
|
def _make_batch_id() -> str:
|
||||||
|
today = datetime.utcnow().strftime("%y%m%d")
|
||||||
|
suffix = "".join(random.choices(_BATCH_ID_CHARS, k=4))
|
||||||
|
return f"BATCH-{today}-{suffix}"
|
||||||
|
|
||||||
|
|
||||||
|
def _get_existing_sns(db) -> set:
|
||||||
|
existing = set()
|
||||||
|
for doc in db.collection(COLLECTION).select(["serial_number"]).stream():
|
||||||
|
data = doc.to_dict()
|
||||||
|
sn = data.get("serial_number")
|
||||||
|
if sn:
|
||||||
|
existing.add(sn)
|
||||||
|
return existing
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_user_list(raw_list: list) -> list[str]:
|
||||||
|
"""Convert user_list entries (DocumentReferences or path strings) to plain user ID strings."""
|
||||||
|
from google.cloud.firestore_v1 import DocumentReference
|
||||||
|
result = []
|
||||||
|
for entry in raw_list:
|
||||||
|
if isinstance(entry, DocumentReference):
|
||||||
|
result.append(entry.id)
|
||||||
|
elif isinstance(entry, str):
|
||||||
|
result.append(entry.split("/")[-1])
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def _doc_to_inventory_item(doc) -> DeviceInventoryItem:
|
||||||
|
data = doc.to_dict() or {}
|
||||||
|
created_raw = data.get("created_at")
|
||||||
|
if isinstance(created_raw, datetime):
|
||||||
|
created_str = created_raw.strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||||
|
else:
|
||||||
|
created_str = str(created_raw) if created_raw else None
|
||||||
|
|
||||||
|
return DeviceInventoryItem(
|
||||||
|
id=doc.id,
|
||||||
|
serial_number=data.get("serial_number", ""),
|
||||||
|
hw_type=data.get("hw_type", ""),
|
||||||
|
hw_version=data.get("hw_version", ""),
|
||||||
|
mfg_status=data.get("mfg_status", "manufactured"),
|
||||||
|
mfg_batch_id=data.get("mfg_batch_id"),
|
||||||
|
created_at=created_str,
|
||||||
|
owner=data.get("owner"),
|
||||||
|
assigned_to=data.get("assigned_to"),
|
||||||
|
device_name=data.get("device_name") or None,
|
||||||
|
lifecycle_history=data.get("lifecycle_history") or [],
|
||||||
|
customer_id=data.get("customer_id"),
|
||||||
|
user_list=_resolve_user_list(data.get("user_list") or []),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_batch(data: BatchCreate) -> BatchResponse:
    """Create a manufacturing batch of devices.

    Generates `data.quantity` unique serial numbers (checked against every
    SN already in Firestore) and writes one inventory document per device,
    all tagged with a fresh batch id.

    Raises RuntimeError if a unique serial cannot be produced within the
    collision retry limit.
    """
    db = get_db()
    existing_sns = _get_existing_sns(db)
    batch_id = _make_batch_id()
    now = datetime.now(timezone.utc)
    serial_numbers = []

    for _ in range(data.quantity):
        # Retry serial generation until unique; bail out after 200 attempts.
        for attempt in range(200):
            sn = generate_serial(data.board_type.value, data.board_version)
            if sn not in existing_sns:
                existing_sns.add(sn)
                break
        else:
            raise RuntimeError("Could not generate unique serial numbers — collision limit hit")

        db.collection(COLLECTION).add({
            "serial_number": sn,
            "hw_type": data.board_type.value,
            "hw_version": data.board_version,
            "mfg_status": "manufactured",
            "mfg_batch_id": batch_id,
            "created_at": now,
            "owner": None,
            "assigned_to": None,
            "user_list": [],
            # Legacy fields left empty so existing device views don't break
            "device_name": "",
            "device_location": "",
            "is_Online": False,
            "lifecycle_history": [
                {
                    "status_id": "manufactured",
                    "date": now.isoformat(),
                    "note": None,
                    "set_by": None,
                }
            ],
        })
        serial_numbers.append(sn)

    return BatchResponse(
        batch_id=batch_id,
        serial_numbers=serial_numbers,
        board_type=data.board_type.value,
        board_version=data.board_version,
        created_at=now.strftime("%Y-%m-%dT%H:%M:%SZ"),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def list_devices(
    status: str | None = None,
    hw_type: str | None = None,
    search: str | None = None,
    limit: int = 100,
    offset: int = 0,
) -> list[DeviceInventoryItem]:
    """List inventory devices.

    status/hw_type filter server-side in Firestore; the free-text search is
    applied in memory over SN, owner and batch id; offset/limit paginate the
    final list.
    """
    db = get_db()
    query = db.collection(COLLECTION)

    if status:
        query = query.where("mfg_status", "==", status)
    if hw_type:
        query = query.where("hw_type", "==", hw_type)

    items = [_doc_to_inventory_item(snapshot) for snapshot in query.stream()]

    if search:
        needle = search.lower()

        def matches(item: DeviceInventoryItem) -> bool:
            haystacks = (item.serial_number, item.owner, item.mfg_batch_id)
            return any(needle in (value or "").lower() for value in haystacks)

        items = [item for item in items if matches(item)]

    return items[offset: offset + limit]
|
||||||
|
|
||||||
|
|
||||||
|
def get_device_by_sn(sn: str) -> DeviceInventoryItem:
    """Look up a single device by serial number; raises NotFoundError if absent."""
    db = get_db()
    query = db.collection(COLLECTION).where("serial_number", "==", sn).limit(1)
    for snapshot in query.stream():
        return _doc_to_inventory_item(snapshot)
    raise NotFoundError("Device")
|
||||||
|
|
||||||
|
|
||||||
|
def update_device_status(sn: str, data: DeviceStatusUpdate, set_by: str | None = None) -> DeviceInventoryItem:
    """Set a device's manufacturing status and record it in lifecycle_history.

    The history entry for a given status is upserted: a repeat transition to
    the same status overwrites the earlier entry instead of appending.

    Raises NotFoundError if no device matches the serial number.
    """
    db = get_db()
    docs = list(db.collection(COLLECTION).where("serial_number", "==", sn).limit(1).stream())
    if not docs:
        raise NotFoundError("Device")

    doc_ref = docs[0].reference
    doc_data = docs[0].to_dict() or {}
    now = datetime.now(timezone.utc).isoformat()

    # Copy before mutating so the snapshot's list is left untouched.
    history = list(doc_data.get("lifecycle_history") or [])

    # Upsert lifecycle entry — overwrite existing entry for this status if present
    new_entry = {
        "status_id": data.status.value,
        "date": now,
        "note": data.note if data.note else None,
        "set_by": set_by,
    }
    existing_idx = next(
        (i for i, e in enumerate(history) if e.get("status_id") == data.status.value),
        None,
    )
    if existing_idx is not None:
        history[existing_idx] = new_entry
    else:
        history.append(new_entry)

    update = {
        "mfg_status": data.status.value,
        "lifecycle_history": history,
    }
    if data.note:
        # Also keep the latest free-text note directly on the document.
        update["mfg_status_note"] = data.note
    doc_ref.update(update)

    # Re-read so the response reflects the stored document.
    return _doc_to_inventory_item(doc_ref.get())
|
||||||
|
|
||||||
|
|
||||||
|
def get_nvs_binary(sn: str, hw_type_override: str | None = None, hw_revision_override: str | None = None, legacy: bool = False) -> bytes:
    """Build the NVS binary image for a device.

    Non-empty overrides take precedence over the hw_type / hw_version stored
    on the inventory record.
    """
    item = get_device_by_sn(sn)
    hw_family = hw_type_override or item.hw_type
    hw_revision = hw_revision_override or item.hw_version
    return generate_nvs_binary(
        serial_number=item.serial_number,
        hw_family=hw_family,
        hw_revision=hw_revision,
        legacy=legacy,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def assign_device(sn: str, data: DeviceAssign) -> DeviceInventoryItem:
    """Assign a device to a customer by customer_id.

    - Stores customer_id on the device doc.
    - Adds the device to the customer's owned_items list.
    - Sets mfg_status to 'sold' unless device is already 'claimed'.

    Raises NotFoundError if either the device or the customer is missing.
    """
    db = get_db()
    CRM_COLLECTION = "crm_customers"

    # Get device doc
    docs = list(db.collection(COLLECTION).where("serial_number", "==", sn).limit(1).stream())
    if not docs:
        raise NotFoundError("Device")

    doc_data = docs[0].to_dict() or {}
    doc_ref = docs[0].reference
    current_status = doc_data.get("mfg_status", "manufactured")

    # Get customer doc
    customer_ref = db.collection(CRM_COLLECTION).document(data.customer_id)
    customer_doc = customer_ref.get()
    if not customer_doc.exists:
        raise NotFoundError("Customer")
    customer_data = customer_doc.to_dict() or {}

    # Determine new status: don't downgrade claimed → sold
    new_status = current_status if current_status == "claimed" else "sold"

    now = datetime.now(timezone.utc).isoformat()
    history = doc_data.get("lifecycle_history") or []
    # NOTE(review): assignments always append (no upsert), so re-assigning a
    # device leaves multiple history entries — confirm this is intended.
    history.append({
        "status_id": new_status,
        "date": now,
        "note": "Assigned to customer",
        "set_by": None,
    })

    doc_ref.update({
        "customer_id": data.customer_id,
        "mfg_status": new_status,
        "lifecycle_history": history,
    })

    # Add to customer's owned_items (avoid duplicates)
    owned_items = customer_data.get("owned_items", []) or []
    device_doc_id = docs[0].id
    already_assigned = any(
        item.get("type") == "console_device"
        and item.get("console_device", {}).get("device_id") == device_doc_id
        for item in owned_items
    )
    if not already_assigned:
        # Fall back to a board-type label (or the raw SN) when no name is set.
        device_name = doc_data.get("device_name") or BOARD_TYPE_LABELS.get(doc_data.get("hw_type", ""), sn)
        owned_items.append({
            "type": "console_device",
            "console_device": {
                "device_id": device_doc_id,
                "serial_number": sn,
                "label": device_name,
            },
        })
        customer_ref.update({"owned_items": owned_items})

    return _doc_to_inventory_item(doc_ref.get())
|
||||||
|
|
||||||
|
|
||||||
|
def search_customers(q: str) -> list:
    """Search crm_customers by name, email, phone, organization, or tags."""
    db = get_db()
    CRM_COLLECTION = "crm_customers"
    needle = q.lower().strip()
    matches = []

    for snapshot in db.collection(CRM_COLLECTION).stream():
        data = snapshot.to_dict() or {}
        loc = data.get("location") or {}
        if not isinstance(loc, dict):
            loc = {}
        city = loc.get("city") or ""

        # Concatenate every searchable field into one lowercase haystack.
        haystack_parts = [
            data.get("name"), data.get("surname"),
            data.get("email"), data.get("phone"), data.get("organization"),
            loc.get("address"), loc.get("city"), loc.get("postal_code"),
            loc.get("region"), loc.get("country"),
            " ".join(data.get("tags") or []),
        ]
        haystack = " ".join(part for part in haystack_parts if part).lower()

        # An empty query matches everything.
        if not needle or needle in haystack:
            matches.append({
                "id": snapshot.id,
                "name": data.get("name") or "",
                "surname": data.get("surname") or "",
                "email": data.get("email") or "",
                "organization": data.get("organization") or "",
                "phone": data.get("phone") or "",
                "city": city or "",
            })

    return matches
|
||||||
|
|
||||||
|
|
||||||
|
def get_stats() -> ManufacturingStats:
    """Aggregate per-status device counts plus the 10 most recent
    provisioned/sold/claimed devices (ordered by created_at)."""
    db = get_db()
    snapshots = list(db.collection(COLLECTION).stream())

    all_statuses = ["manufactured", "flashed", "provisioned", "sold", "claimed", "decommissioned"]
    counts = dict.fromkeys(all_statuses, 0)

    candidates = []
    for snapshot in snapshots:
        data = snapshot.to_dict() or {}
        status = data.get("mfg_status", "manufactured")
        if status in counts:
            counts[status] += 1

        if status not in ("provisioned", "sold", "claimed"):
            continue

        # Use created_at as a proxy timestamp; Firestore DatetimeWithNanoseconds or plain datetime
        ts = data.get("created_at")
        if isinstance(ts, datetime):
            ts_str = ts.strftime("%Y-%m-%dT%H:%M:%SZ")
        else:
            ts_str = str(ts) if ts else None

        candidates.append(RecentActivityItem(
            serial_number=data.get("serial_number", ""),
            hw_type=data.get("hw_type", ""),
            mfg_status=status,
            owner=data.get("owner"),
            updated_at=ts_str,
        ))

    # Newest first; entries without a timestamp sort last.
    candidates.sort(key=lambda item: item.updated_at or "", reverse=True)
    return ManufacturingStats(counts=counts, recent_activity=candidates[:10])
|
||||||
|
|
||||||
|
|
||||||
|
PROTECTED_STATUSES = {"sold", "claimed"}
|
||||||
|
|
||||||
|
|
||||||
|
def delete_device(sn: str, force: bool = False) -> None:
    """Delete a device by serial number.

    Raises PermissionError if the device is sold/claimed and force is not set.
    The frontend uses force=True only after the user confirms by typing the SN.
    """
    db = get_db()
    docs = list(db.collection(COLLECTION).where("serial_number", "==", sn).limit(1).stream())
    if not docs:
        raise NotFoundError("Device")

    snapshot = docs[0]
    status = (snapshot.to_dict() or {}).get("mfg_status", "manufactured")

    if not force and status in PROTECTED_STATUSES:
        raise PermissionError(
            f"Device {sn} has status '{status}' and cannot be deleted without explicit confirmation."
        )

    snapshot.reference.delete()
|
||||||
|
|
||||||
|
|
||||||
|
def delete_unprovisioned_devices() -> list[str]:
    """Delete all devices with status 'manufactured' (never flashed/provisioned).

    Returns the list of deleted serial numbers.
    """
    db = get_db()
    # Materialize the result set up front so deletions don't race the stream.
    snapshots = list(db.collection(COLLECTION).where("mfg_status", "==", "manufactured").stream())

    removed: list[str] = []
    for snapshot in snapshots:
        serial = (snapshot.to_dict() or {}).get("serial_number", "")
        snapshot.reference.delete()
        removed.append(serial)
    return removed
|
||||||
|
|
||||||
|
|
||||||
|
KNOWN_HW_TYPES = ["vesper", "vesper_plus", "vesper_pro", "agnus", "agnus_mini", "chronos", "chronos_pro"]
|
||||||
|
FLASH_ASSET_FILES = ["bootloader.bin", "partitions.bin"]
|
||||||
|
|
||||||
|
|
||||||
|
def _flash_asset_path(hw_type: str, asset: str) -> Path:
    """Return path to a flash asset (bootloader.bin or partitions.bin) for a given hw_type."""
    base = Path(settings.flash_assets_storage_path)
    return base / hw_type / asset
|
||||||
|
|
||||||
|
|
||||||
|
def _flash_asset_info(hw_type: str) -> dict:
    """Build the asset info dict for a single hw_type by inspecting the filesystem."""
    base = Path(settings.flash_assets_storage_path) / hw_type
    note_path = base / "note.txt"
    # Optional free-text note stored alongside the binaries; empty when absent.
    note = note_path.read_text(encoding="utf-8").strip() if note_path.exists() else ""

    files = {}
    for fname in FLASH_ASSET_FILES:
        p = base / fname
        if p.exists():
            stat = p.stat()
            files[fname] = {
                "exists": True,
                "size_bytes": stat.st_size,
                # File mtime stands in for the upload timestamp.
                "uploaded_at": datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat(),
            }
        else:
            files[fname] = {"exists": False, "size_bytes": None, "uploaded_at": None}

    return {
        "hw_type": hw_type,
        "bootloader": files["bootloader.bin"],
        "partitions": files["partitions.bin"],
        "note": note,
    }
|
||||||
|
|
||||||
|
|
||||||
|
def list_flash_assets() -> list:
    """Return asset status for all known board types plus any discovered bespoke directories."""
    base = Path(settings.flash_assets_storage_path)

    # Known hw types always appear, even before any files are uploaded.
    results = [_flash_asset_info(hw_type) for hw_type in KNOWN_HW_TYPES]
    seen = set(KNOWN_HW_TYPES)

    # Bespoke directories: anything on disk that is not a known hw type.
    if base.exists():
        for entry in sorted(base.iterdir()):
            if not entry.is_dir() or entry.name in seen:
                continue
            seen.add(entry.name)
            bespoke = _flash_asset_info(entry.name)
            bespoke["is_bespoke"] = True
            results.append(bespoke)

    # Known types default to non-bespoke.
    for record in results:
        record.setdefault("is_bespoke", False)

    return results
|
||||||
|
|
||||||
|
|
||||||
|
def save_flash_asset(hw_type: str, asset: str, data: bytes) -> Path:
    """Persist a flash asset binary. asset must be 'bootloader.bin' or 'partitions.bin'."""
    if asset not in ("bootloader.bin", "partitions.bin"):
        raise ValueError(f"Unknown flash asset: {asset}")
    destination = _flash_asset_path(hw_type, asset)
    destination.parent.mkdir(parents=True, exist_ok=True)
    destination.write_bytes(data)
    return destination
|
||||||
|
|
||||||
|
|
||||||
|
def delete_flash_asset(hw_type: str, asset: str) -> None:
    """Delete a flash asset file. Raises NotFoundError if not present."""
    target = _flash_asset_path(hw_type, asset)
    if not target.exists():
        raise NotFoundError(f"Flash asset '{asset}' for '{hw_type}' not found")
    target.unlink()
|
||||||
|
|
||||||
|
|
||||||
|
def set_flash_asset_note(hw_type: str, note: str) -> None:
    """Write (or clear) the note for a hw_type's flash asset directory."""
    directory = Path(settings.flash_assets_storage_path) / hw_type
    directory.mkdir(parents=True, exist_ok=True)
    note_path = directory / "note.txt"

    trimmed = note.strip()
    if trimmed:
        note_path.write_text(trimmed, encoding="utf-8")
    elif note_path.exists():
        # A blank note means "remove the note file".
        note_path.unlink()
|
||||||
|
|
||||||
|
|
||||||
|
def get_flash_asset(hw_type: str, asset: str) -> bytes:
    """Load a flash asset binary. Raises NotFoundError if not uploaded yet."""
    source = _flash_asset_path(hw_type, asset)
    if source.exists():
        return source.read_bytes()
    raise NotFoundError(f"Flash asset '{asset}' for hw_type '{hw_type}' — upload it first via POST /api/manufacturing/flash-assets/{{hw_type}}/{{asset}}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_firmware_url(sn: str) -> str:
    """Return the FastAPI download URL for the latest stable firmware for this device's hw_type."""
    from firmware.service import get_latest

    device = get_device_by_sn(sn)
    latest = get_latest(device.hw_type.lower(), "stable")
    # download_url is a relative path like /api/firmware/vs/stable/1.4.2/firmware.bin
    return latest.download_url
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from mqtt.database import get_db
|
from database import get_db
|
||||||
|
|
||||||
logger = logging.getLogger("melodies.database")
|
logger = logging.getLogger("melodies.database")
|
||||||
|
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ class MelodyInfo(BaseModel):
|
|||||||
isTrueRing: bool = False
|
isTrueRing: bool = False
|
||||||
previewURL: str = ""
|
previewURL: str = ""
|
||||||
archetype_csv: Optional[str] = None
|
archetype_csv: Optional[str] = None
|
||||||
|
outdated_archetype: bool = False
|
||||||
|
|
||||||
|
|
||||||
class MelodyAttributes(BaseModel):
|
class MelodyAttributes(BaseModel):
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
from fastapi import APIRouter, Depends, UploadFile, File, Query, HTTPException
|
from fastapi import APIRouter, Depends, UploadFile, File, Query, HTTPException, Response
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from auth.models import TokenPayload
|
from auth.models import TokenPayload
|
||||||
from auth.dependencies import require_permission
|
from auth.dependencies import require_permission
|
||||||
@@ -43,7 +43,7 @@ async def create_melody(
|
|||||||
publish: bool = Query(False),
|
publish: bool = Query(False),
|
||||||
_user: TokenPayload = Depends(require_permission("melodies", "add")),
|
_user: TokenPayload = Depends(require_permission("melodies", "add")),
|
||||||
):
|
):
|
||||||
return await service.create_melody(body, publish=publish)
|
return await service.create_melody(body, publish=publish, actor_name=_user.name)
|
||||||
|
|
||||||
|
|
||||||
@router.put("/{melody_id}", response_model=MelodyInDB)
|
@router.put("/{melody_id}", response_model=MelodyInDB)
|
||||||
@@ -52,7 +52,7 @@ async def update_melody(
|
|||||||
body: MelodyUpdate,
|
body: MelodyUpdate,
|
||||||
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
||||||
):
|
):
|
||||||
return await service.update_melody(melody_id, body)
|
return await service.update_melody(melody_id, body, actor_name=_user.name)
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{melody_id}", status_code=204)
|
@router.delete("/{melody_id}", status_code=204)
|
||||||
@@ -99,7 +99,14 @@ async def upload_file(
|
|||||||
if file_type == "binary":
|
if file_type == "binary":
|
||||||
content_type = "application/octet-stream"
|
content_type = "application/octet-stream"
|
||||||
|
|
||||||
url = service.upload_file(melody_id, contents, file.filename, content_type)
|
url = service.upload_file_for_melody(
|
||||||
|
melody_id=melody_id,
|
||||||
|
melody_uid=melody.uid,
|
||||||
|
melody_pid=melody.pid,
|
||||||
|
file_bytes=contents,
|
||||||
|
filename=file.filename,
|
||||||
|
content_type=content_type,
|
||||||
|
)
|
||||||
|
|
||||||
# Update the melody document with the file URL
|
# Update the melody document with the file URL
|
||||||
if file_type == "preview":
|
if file_type == "preview":
|
||||||
@@ -108,9 +115,9 @@ async def upload_file(
|
|||||||
name=melody.information.name,
|
name=melody.information.name,
|
||||||
previewURL=url,
|
previewURL=url,
|
||||||
)
|
)
|
||||||
))
|
), actor_name=_user.name)
|
||||||
elif file_type == "binary":
|
elif file_type == "binary":
|
||||||
await service.update_melody(melody_id, MelodyUpdate(url=url))
|
await service.update_melody(melody_id, MelodyUpdate(url=url), actor_name=_user.name)
|
||||||
|
|
||||||
return {"url": url, "file_type": file_type}
|
return {"url": url, "file_type": file_type}
|
||||||
|
|
||||||
@@ -125,8 +132,8 @@ async def delete_file(
|
|||||||
if file_type not in ("binary", "preview"):
|
if file_type not in ("binary", "preview"):
|
||||||
raise HTTPException(status_code=400, detail="file_type must be 'binary' or 'preview'")
|
raise HTTPException(status_code=400, detail="file_type must be 'binary' or 'preview'")
|
||||||
|
|
||||||
await service.get_melody(melody_id)
|
melody = await service.get_melody(melody_id)
|
||||||
service.delete_file(melody_id, file_type)
|
service.delete_file(melody_id, file_type, melody.uid)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{melody_id}/files")
|
@router.get("/{melody_id}/files")
|
||||||
@@ -135,5 +142,35 @@ async def get_files(
|
|||||||
_user: TokenPayload = Depends(require_permission("melodies", "view")),
|
_user: TokenPayload = Depends(require_permission("melodies", "view")),
|
||||||
):
|
):
|
||||||
"""Get storage file URLs for a melody."""
|
"""Get storage file URLs for a melody."""
|
||||||
await service.get_melody(melody_id)
|
melody = await service.get_melody(melody_id)
|
||||||
return service.get_storage_files(melody_id)
|
return service.get_storage_files(melody_id, melody.uid)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{melody_id}/set-outdated", response_model=MelodyInDB)
async def set_outdated(
    melody_id: str,
    outdated: bool = Query(...),
    _user: TokenPayload = Depends(require_permission("melodies", "edit")),
):
    """Manually set or clear the outdated_archetype flag on a melody."""
    melody = await service.get_melody(melody_id)
    # Round-trip the information sub-model so only the flag changes.
    info = melody.information.model_dump()
    info["outdated_archetype"] = outdated
    return await service.update_melody(
        melody_id,
        MelodyUpdate(information=MelodyInfo(**info)),
        actor_name=_user.name,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{melody_id}/download/binary")
async def download_binary_file(
    melody_id: str,
    _user: TokenPayload = Depends(require_permission("melodies", "view")),
):
    """Download current melody binary with a PID-based filename."""
    melody = await service.get_melody(melody_id)
    file_bytes, content_type = service.get_binary_file_bytes(melody_id, melody.uid)
    filename = f"{(melody.pid or 'binary')}.bsm"
    # Fix: interpolate the computed PID-based filename into the header — it was
    # previously computed but never used, leaving a hard-coded placeholder name.
    headers = {"Content-Disposition": f'attachment; filename="{filename}"'}
    return Response(content=file_bytes, media_type=content_type, headers=headers)
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import json
|
import json
|
||||||
import uuid
|
import uuid
|
||||||
import logging
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
from shared.firebase import get_db as get_firestore, get_bucket
|
from shared.firebase import get_db as get_firestore, get_bucket
|
||||||
from shared.exceptions import NotFoundError
|
from shared.exceptions import NotFoundError
|
||||||
@@ -93,10 +94,44 @@ async def get_melody(melody_id: str) -> MelodyInDB:
|
|||||||
raise NotFoundError("Melody")
|
raise NotFoundError("Melody")
|
||||||
|
|
||||||
|
|
||||||
async def create_melody(data: MelodyCreate, publish: bool = False) -> MelodyInDB:
|
def _sanitize_metadata_for_create(existing: dict | None, actor_name: str | None) -> dict:
|
||||||
|
now = datetime.utcnow().isoformat() + "Z"
|
||||||
|
metadata = dict(existing or {})
|
||||||
|
creator = metadata.get("createdBy") or actor_name or "Unknown"
|
||||||
|
created_at = metadata.get("dateCreated") or now
|
||||||
|
metadata["createdBy"] = creator
|
||||||
|
metadata["dateCreated"] = created_at
|
||||||
|
metadata["lastEditedBy"] = actor_name or metadata.get("lastEditedBy") or creator
|
||||||
|
metadata["dateEdited"] = now
|
||||||
|
if "adminNotes" not in metadata:
|
||||||
|
metadata["adminNotes"] = []
|
||||||
|
return metadata
|
||||||
|
|
||||||
|
|
||||||
|
def _sanitize_metadata_for_update(existing: dict | None, incoming: dict | None, actor_name: str | None) -> dict:
|
||||||
|
now = datetime.utcnow().isoformat() + "Z"
|
||||||
|
existing_meta = dict(existing or {})
|
||||||
|
incoming_meta = dict(incoming or {})
|
||||||
|
|
||||||
|
# Created fields are immutable after first set.
|
||||||
|
created_by = existing_meta.get("createdBy") or incoming_meta.get("createdBy") or actor_name or "Unknown"
|
||||||
|
date_created = existing_meta.get("dateCreated") or incoming_meta.get("dateCreated") or now
|
||||||
|
|
||||||
|
merged = {**existing_meta, **incoming_meta}
|
||||||
|
merged["createdBy"] = created_by
|
||||||
|
merged["dateCreated"] = date_created
|
||||||
|
merged["lastEditedBy"] = actor_name or incoming_meta.get("lastEditedBy") or existing_meta.get("lastEditedBy") or created_by
|
||||||
|
merged["dateEdited"] = now
|
||||||
|
if "adminNotes" not in merged:
|
||||||
|
merged["adminNotes"] = existing_meta.get("adminNotes", [])
|
||||||
|
return merged
|
||||||
|
|
||||||
|
|
||||||
|
async def create_melody(data: MelodyCreate, publish: bool = False, actor_name: str | None = None) -> MelodyInDB:
|
||||||
"""Create a new melody. If publish=True, also push to Firestore."""
|
"""Create a new melody. If publish=True, also push to Firestore."""
|
||||||
melody_id = str(uuid.uuid4())
|
melody_id = str(uuid.uuid4())
|
||||||
doc_data = data.model_dump()
|
doc_data = data.model_dump()
|
||||||
|
doc_data["metadata"] = _sanitize_metadata_for_create(doc_data.get("metadata"), actor_name)
|
||||||
status = "published" if publish else "draft"
|
status = "published" if publish else "draft"
|
||||||
|
|
||||||
# Always save to SQLite
|
# Always save to SQLite
|
||||||
@@ -110,7 +145,7 @@ async def create_melody(data: MelodyCreate, publish: bool = False) -> MelodyInDB
|
|||||||
return MelodyInDB(id=melody_id, status=status, **doc_data)
|
return MelodyInDB(id=melody_id, status=status, **doc_data)
|
||||||
|
|
||||||
|
|
||||||
async def update_melody(melody_id: str, data: MelodyUpdate) -> MelodyInDB:
|
async def update_melody(melody_id: str, data: MelodyUpdate, actor_name: str | None = None) -> MelodyInDB:
|
||||||
"""Update an existing melody. If published, also update Firestore."""
|
"""Update an existing melody. If published, also update Firestore."""
|
||||||
row = await melody_db.get_melody(melody_id)
|
row = await melody_db.get_melody(melody_id)
|
||||||
if not row:
|
if not row:
|
||||||
@@ -124,6 +159,12 @@ async def update_melody(melody_id: str, data: MelodyUpdate) -> MelodyInDB:
|
|||||||
if key in update_data and key in existing_data:
|
if key in update_data and key in existing_data:
|
||||||
merged = {**existing_data[key], **update_data[key]}
|
merged = {**existing_data[key], **update_data[key]}
|
||||||
update_data[key] = merged
|
update_data[key] = merged
|
||||||
|
if "metadata" in update_data or "metadata" in existing_data:
|
||||||
|
update_data["metadata"] = _sanitize_metadata_for_update(
|
||||||
|
existing_data.get("metadata"),
|
||||||
|
update_data.get("metadata"),
|
||||||
|
actor_name,
|
||||||
|
)
|
||||||
|
|
||||||
merged_data = {**existing_data, **update_data}
|
merged_data = {**existing_data, **update_data}
|
||||||
|
|
||||||
@@ -191,7 +232,7 @@ async def delete_melody(melody_id: str) -> None:
|
|||||||
doc_ref.delete()
|
doc_ref.delete()
|
||||||
|
|
||||||
# Delete storage files
|
# Delete storage files
|
||||||
_delete_storage_files(melody_id)
|
_delete_storage_files(melody_id, row["data"].get("uid"))
|
||||||
|
|
||||||
# Delete from SQLite
|
# Delete from SQLite
|
||||||
await melody_db.delete_melody(melody_id)
|
await melody_db.delete_melody(melody_id)
|
||||||
@@ -215,14 +256,116 @@ def upload_file(melody_id: str, file_bytes: bytes, filename: str, content_type:
|
|||||||
return blob.public_url
|
return blob.public_url
|
||||||
|
|
||||||
|
|
||||||
def delete_file(melody_id: str, file_type: str) -> None:
|
def _is_binary_blob_name(blob_name: str) -> bool:
|
||||||
|
lower = (blob_name or "").lower()
|
||||||
|
base = lower.rsplit("/", 1)[-1]
|
||||||
|
if "preview" in base:
|
||||||
|
return False
|
||||||
|
return ("binary" in base) or base.endswith(".bin") or base.endswith(".bsm")
|
||||||
|
|
||||||
|
|
||||||
|
def _safe_storage_segment(raw: str | None, fallback: str) -> str:
|
||||||
|
value = (raw or "").strip()
|
||||||
|
if not value:
|
||||||
|
value = fallback
|
||||||
|
chars = []
|
||||||
|
for ch in value:
|
||||||
|
if ch.isalnum() or ch in ("-", "_", "."):
|
||||||
|
chars.append(ch)
|
||||||
|
else:
|
||||||
|
chars.append("_")
|
||||||
|
cleaned = "".join(chars).strip("._")
|
||||||
|
return cleaned or fallback
|
||||||
|
|
||||||
|
|
||||||
|
def _storage_prefixes(melody_id: str, melody_uid: str | None) -> list[str]:
    """Storage prefixes for a melody: the uid-based path first, then the
    legacy id-based path when it differs."""
    uid_seg = _safe_storage_segment(melody_uid, melody_id)
    id_seg = _safe_storage_segment(melody_id, melody_id)
    candidates = [f"melodies/{uid_seg}/", f"melodies/{id_seg}/"]
    if uid_seg == id_seg:
        return candidates[:1]
    return candidates
|
||||||
|
|
||||||
|
|
||||||
|
def _list_blobs_for_prefixes(bucket, prefixes: list[str]):
|
||||||
|
all_blobs = []
|
||||||
|
seen = set()
|
||||||
|
for prefix in prefixes:
|
||||||
|
for blob in bucket.list_blobs(prefix=prefix):
|
||||||
|
if blob.name in seen:
|
||||||
|
continue
|
||||||
|
seen.add(blob.name)
|
||||||
|
all_blobs.append(blob)
|
||||||
|
return all_blobs
|
||||||
|
|
||||||
|
|
||||||
|
def upload_file_for_melody(melody_id: str, melody_uid: str | None, melody_pid: str | None, file_bytes: bytes, filename: str, content_type: str) -> str:
    """Upload a file to Firebase Storage under melodies/{melody_uid or melody_id}/.

    Binary files are stored as {pid}.bsm and replace previous melody binaries.
    Returns the public URL of the uploaded blob.
    Raises RuntimeError when the storage bucket is unavailable.
    """
    bucket = get_bucket()
    if not bucket:
        raise RuntimeError("Firebase Storage not initialized")

    prefixes = _storage_prefixes(melody_id, melody_uid)
    primary_prefix = prefixes[0]

    if content_type in ("application/octet-stream", "application/macbinary"):
        # Keep one active binary per melody, clean older binaries in both legacy/current prefixes.
        for blob in _list_blobs_for_prefixes(bucket, prefixes):
            if _is_binary_blob_name(blob.name):
                blob.delete()

        # Name the binary after the uploaded file's stem, falling back to the pid.
        stem = filename.rsplit(".", 1)[0] if "." in filename else filename
        pid_seg = _safe_storage_segment(stem or melody_pid, "binary")
        storage_path = f"{primary_prefix}{pid_seg}.bsm"
        binary_content_type = "application/octet-stream"
        blob = bucket.blob(storage_path)
        blob.upload_from_string(file_bytes, content_type=binary_content_type)
        blob.make_public()
        return blob.public_url

    # Anything else is treated as a preview file, keyed by extension.
    ext = filename.rsplit(".", 1)[-1] if "." in filename else "mp3"
    storage_path = f"{primary_prefix}preview.{ext}"
    blob = bucket.blob(storage_path)
    blob.upload_from_string(file_bytes, content_type=content_type)
    blob.make_public()
    return blob.public_url
|
||||||
|
|
||||||
|
|
||||||
|
def get_binary_file_bytes(melody_id: str, melody_uid: str | None = None) -> tuple[bytes, str]:
|
||||||
|
"""Fetch current binary bytes for a melody from Firebase Storage."""
|
||||||
|
bucket = get_bucket()
|
||||||
|
if not bucket:
|
||||||
|
raise RuntimeError("Firebase Storage not initialized")
|
||||||
|
|
||||||
|
prefixes = _storage_prefixes(melody_id, melody_uid)
|
||||||
|
blobs = [b for b in _list_blobs_for_prefixes(bucket, prefixes) if _is_binary_blob_name(b.name)]
|
||||||
|
if not blobs:
|
||||||
|
raise NotFoundError("Binary file")
|
||||||
|
|
||||||
|
# Prefer explicit binary.* naming, then newest.
|
||||||
|
blobs.sort(
|
||||||
|
key=lambda b: (
|
||||||
|
0 if "binary" in b.name.rsplit("/", 1)[-1].lower() else 1,
|
||||||
|
-(int(b.time_created.timestamp()) if getattr(b, "time_created", None) else 0),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
chosen = blobs[0]
|
||||||
|
data = chosen.download_as_bytes()
|
||||||
|
content_type = chosen.content_type or "application/octet-stream"
|
||||||
|
return data, content_type
|
||||||
|
|
||||||
|
|
||||||
|
def delete_file(melody_id: str, file_type: str, melody_uid: str | None = None) -> None:
|
||||||
"""Delete a specific file from storage. file_type is 'binary' or 'preview'."""
|
"""Delete a specific file from storage. file_type is 'binary' or 'preview'."""
|
||||||
bucket = get_bucket()
|
bucket = get_bucket()
|
||||||
if not bucket:
|
if not bucket:
|
||||||
return
|
return
|
||||||
|
|
||||||
prefix = f"melodies/{melody_id}/"
|
prefixes = _storage_prefixes(melody_id, melody_uid)
|
||||||
blobs = list(bucket.list_blobs(prefix=prefix))
|
blobs = _list_blobs_for_prefixes(bucket, prefixes)
|
||||||
|
|
||||||
for blob in blobs:
|
for blob in blobs:
|
||||||
if file_type == "binary" and "binary" in blob.name:
|
if file_type == "binary" and "binary" in blob.name:
|
||||||
@@ -231,31 +374,31 @@ def delete_file(melody_id: str, file_type: str) -> None:
|
|||||||
blob.delete()
|
blob.delete()
|
||||||
|
|
||||||
|
|
||||||
def _delete_storage_files(melody_id: str) -> None:
|
def _delete_storage_files(melody_id: str, melody_uid: str | None = None) -> None:
|
||||||
"""Delete all storage files for a melody."""
|
"""Delete all storage files for a melody."""
|
||||||
bucket = get_bucket()
|
bucket = get_bucket()
|
||||||
if not bucket:
|
if not bucket:
|
||||||
return
|
return
|
||||||
|
|
||||||
prefix = f"melodies/{melody_id}/"
|
prefixes = _storage_prefixes(melody_id, melody_uid)
|
||||||
blobs = list(bucket.list_blobs(prefix=prefix))
|
blobs = _list_blobs_for_prefixes(bucket, prefixes)
|
||||||
for blob in blobs:
|
for blob in blobs:
|
||||||
blob.delete()
|
blob.delete()
|
||||||
|
|
||||||
|
|
||||||
def get_storage_files(melody_id: str) -> dict:
|
def get_storage_files(melody_id: str, melody_uid: str | None = None) -> dict:
|
||||||
"""List storage files for a melody, returning URLs."""
|
"""List storage files for a melody, returning URLs."""
|
||||||
bucket = get_bucket()
|
bucket = get_bucket()
|
||||||
if not bucket:
|
if not bucket:
|
||||||
return {"binary_url": None, "preview_url": None}
|
return {"binary_url": None, "preview_url": None}
|
||||||
|
|
||||||
prefix = f"melodies/{melody_id}/"
|
prefixes = _storage_prefixes(melody_id, melody_uid)
|
||||||
blobs = list(bucket.list_blobs(prefix=prefix))
|
blobs = _list_blobs_for_prefixes(bucket, prefixes)
|
||||||
|
|
||||||
result = {"binary_url": None, "preview_url": None}
|
result = {"binary_url": None, "preview_url": None}
|
||||||
for blob in blobs:
|
for blob in blobs:
|
||||||
blob.make_public()
|
blob.make_public()
|
||||||
if "binary" in blob.name:
|
if _is_binary_blob_name(blob.name):
|
||||||
result["binary_url"] = blob.public_url
|
result["binary_url"] = blob.public_url
|
||||||
elif "preview" in blob.name:
|
elif "preview" in blob.name:
|
||||||
result["preview_url"] = blob.public_url
|
result["preview_url"] = blob.public_url
|
||||||
|
|||||||
178
backend/migrate_customer_flags.py
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
"""
|
||||||
|
One-time migration script: convert legacy negotiating/has_problem flags to new structure.
|
||||||
|
|
||||||
|
Run AFTER deploying the new backend code:
|
||||||
|
cd backend && python migrate_customer_flags.py
|
||||||
|
|
||||||
|
What it does:
|
||||||
|
1. For each customer with negotiating=True:
|
||||||
|
- Creates an order subcollection document with status="negotiating"
|
||||||
|
- Sets relationship_status="active" (only if currently "lead" or "prospect")
|
||||||
|
2. For each customer with has_problem=True:
|
||||||
|
- Appends one entry to technical_issues with active=True
|
||||||
|
3. Removes negotiating and has_problem fields from every customer document
|
||||||
|
4. Initialises relationship_status="lead" on any customer missing it
|
||||||
|
5. Recomputes crm_summary for each affected customer
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
# Make sure we can import backend modules
|
||||||
|
sys.path.insert(0, os.path.dirname(__file__))
|
||||||
|
|
||||||
|
from shared.firebase import init_firebase, get_db
|
||||||
|
|
||||||
|
init_firebase()
|
||||||
|
|
||||||
|
|
||||||
|
def migrate():
|
||||||
|
db = get_db()
|
||||||
|
customers_ref = db.collection("crm_customers")
|
||||||
|
docs = list(customers_ref.stream())
|
||||||
|
print(f"Found {len(docs)} customer documents.")
|
||||||
|
|
||||||
|
migrated_neg = 0
|
||||||
|
migrated_prob = 0
|
||||||
|
now = datetime.utcnow().isoformat()
|
||||||
|
|
||||||
|
for doc in docs:
|
||||||
|
data = doc.to_dict() or {}
|
||||||
|
customer_id = doc.id
|
||||||
|
updates = {}
|
||||||
|
changed = False
|
||||||
|
|
||||||
|
# ── 1. Initialise new fields if missing ──────────────────────────────
|
||||||
|
if "relationship_status" not in data:
|
||||||
|
updates["relationship_status"] = "lead"
|
||||||
|
changed = True
|
||||||
|
if "technical_issues" not in data:
|
||||||
|
updates["technical_issues"] = []
|
||||||
|
changed = True
|
||||||
|
if "install_support" not in data:
|
||||||
|
updates["install_support"] = []
|
||||||
|
changed = True
|
||||||
|
if "transaction_history" not in data:
|
||||||
|
updates["transaction_history"] = []
|
||||||
|
changed = True
|
||||||
|
|
||||||
|
# ── 2. Migrate negotiating flag ───────────────────────────────────────
|
||||||
|
if data.get("negotiating"):
|
||||||
|
order_id = str(uuid.uuid4())
|
||||||
|
order_data = {
|
||||||
|
"customer_id": customer_id,
|
||||||
|
"order_number": f"ORD-{datetime.utcnow().year}-001-migrated",
|
||||||
|
"title": "Migrated from legacy negotiating flag",
|
||||||
|
"created_by": "system",
|
||||||
|
"status": "negotiating",
|
||||||
|
"status_updated_date": now,
|
||||||
|
"status_updated_by": "system",
|
||||||
|
"items": [],
|
||||||
|
"subtotal": 0,
|
||||||
|
"discount": None,
|
||||||
|
"total_price": 0,
|
||||||
|
"currency": "EUR",
|
||||||
|
"shipping": None,
|
||||||
|
"payment_status": {
|
||||||
|
"required_amount": 0,
|
||||||
|
"received_amount": 0,
|
||||||
|
"balance_due": 0,
|
||||||
|
"advance_required": False,
|
||||||
|
"advance_amount": None,
|
||||||
|
"payment_complete": False,
|
||||||
|
},
|
||||||
|
"invoice_path": None,
|
||||||
|
"notes": "Migrated from legacy negotiating flag",
|
||||||
|
"timeline": [{
|
||||||
|
"date": now,
|
||||||
|
"type": "note",
|
||||||
|
"note": "Migrated from legacy negotiating flag",
|
||||||
|
"updated_by": "system",
|
||||||
|
}],
|
||||||
|
"created_at": now,
|
||||||
|
"updated_at": now,
|
||||||
|
}
|
||||||
|
customers_ref.document(customer_id).collection("orders").document(order_id).set(order_data)
|
||||||
|
|
||||||
|
current_rel = updates.get("relationship_status") or data.get("relationship_status", "lead")
|
||||||
|
if current_rel in ("lead", "prospect"):
|
||||||
|
updates["relationship_status"] = "active"
|
||||||
|
|
||||||
|
migrated_neg += 1
|
||||||
|
print(f" [{customer_id}] Created negotiating order, set relationship_status=active")
|
||||||
|
|
||||||
|
# ── 3. Migrate has_problem flag ───────────────────────────────────────
|
||||||
|
if data.get("has_problem"):
|
||||||
|
existing_issues = list(updates.get("technical_issues") or data.get("technical_issues") or [])
|
||||||
|
existing_issues.append({
|
||||||
|
"active": True,
|
||||||
|
"opened_date": data.get("updated_at") or now,
|
||||||
|
"resolved_date": None,
|
||||||
|
"note": "Migrated from legacy has_problem flag",
|
||||||
|
"opened_by": "system",
|
||||||
|
"resolved_by": None,
|
||||||
|
})
|
||||||
|
updates["technical_issues"] = existing_issues
|
||||||
|
migrated_prob += 1
|
||||||
|
changed = True
|
||||||
|
print(f" [{customer_id}] Appended technical issue from has_problem flag")
|
||||||
|
|
||||||
|
# ── 4. Remove legacy fields ───────────────────────────────────────────
|
||||||
|
from google.cloud.firestore_v1 import DELETE_FIELD
|
||||||
|
if "negotiating" in data:
|
||||||
|
updates["negotiating"] = DELETE_FIELD
|
||||||
|
changed = True
|
||||||
|
if "has_problem" in data:
|
||||||
|
updates["has_problem"] = DELETE_FIELD
|
||||||
|
changed = True
|
||||||
|
|
||||||
|
if changed or data.get("negotiating") or data.get("has_problem"):
|
||||||
|
updates["updated_at"] = now
|
||||||
|
customers_ref.document(customer_id).update(updates)
|
||||||
|
|
||||||
|
# ── 5. Recompute crm_summary ──────────────────────────────────────────
|
||||||
|
# Re-read updated doc to compute summary
|
||||||
|
updated_doc = customers_ref.document(customer_id).get()
|
||||||
|
updated_data = updated_doc.to_dict() or {}
|
||||||
|
|
||||||
|
issues = updated_data.get("technical_issues") or []
|
||||||
|
active_issues = [i for i in issues if i.get("active")]
|
||||||
|
support = updated_data.get("install_support") or []
|
||||||
|
active_support = [s for s in support if s.get("active")]
|
||||||
|
|
||||||
|
TERMINAL = {"declined", "complete"}
|
||||||
|
active_order_status = None
|
||||||
|
active_order_status_date = None
|
||||||
|
active_order_title = None
|
||||||
|
latest_date = ""
|
||||||
|
for odoc in customers_ref.document(customer_id).collection("orders").stream():
|
||||||
|
odata = odoc.to_dict() or {}
|
||||||
|
if odata.get("status") not in TERMINAL:
|
||||||
|
upd = odata.get("status_updated_date") or odata.get("created_at") or ""
|
||||||
|
if upd > latest_date:
|
||||||
|
latest_date = upd
|
||||||
|
active_order_status = odata.get("status")
|
||||||
|
active_order_status_date = upd
|
||||||
|
active_order_title = odata.get("title")
|
||||||
|
|
||||||
|
summary = {
|
||||||
|
"active_order_status": active_order_status,
|
||||||
|
"active_order_status_date": active_order_status_date,
|
||||||
|
"active_order_title": active_order_title,
|
||||||
|
"active_issues_count": len(active_issues),
|
||||||
|
"latest_issue_date": max((i.get("opened_date") or "") for i in active_issues) if active_issues else None,
|
||||||
|
"active_support_count": len(active_support),
|
||||||
|
"latest_support_date": max((s.get("opened_date") or "") for s in active_support) if active_support else None,
|
||||||
|
}
|
||||||
|
customers_ref.document(customer_id).update({"crm_summary": summary})
|
||||||
|
|
||||||
|
print(f"\nMigration complete.")
|
||||||
|
print(f" Negotiating orders created: {migrated_neg}")
|
||||||
|
print(f" Technical issues created: {migrated_prob}")
|
||||||
|
print(f" Total customers processed: {len(docs)}")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
migrate()
|
||||||
126
backend/mqtt/auth.py
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
"""
|
||||||
|
MQTT authentication and ACL endpoints for mosquitto-go-auth HTTP backend.
|
||||||
|
|
||||||
|
Mosquitto calls these on every CONNECT, SUBSCRIBE, and PUBLISH.
|
||||||
|
- POST /mqtt/auth/user → validate device credentials
|
||||||
|
- POST /mqtt/auth/acl → enforce per-device topic isolation
|
||||||
|
|
||||||
|
Password strategy: HMAC-SHA256(MQTT_SECRET, username)[:32]
|
||||||
|
- Deterministic: no storage needed, re-derive on every auth check
|
||||||
|
- Rotating MQTT_SECRET invalidates all passwords at once if needed
|
||||||
|
|
||||||
|
Transition support: during rollout, the legacy password "vesper" is also
|
||||||
|
accepted so that devices still on old firmware stay connected.
|
||||||
|
|
||||||
|
User types handled:
|
||||||
|
- Device users (e.g. "PV25L22BP01R01", "PV-26A18-BC02R-X7KQA"):
|
||||||
|
Authenticated via HMAC. ACL restricted to their own vesper/{sn}/... topics.
|
||||||
|
- Kiosk users (e.g. "PV25L22BP01R01-kiosk"):
|
||||||
|
Same HMAC auth derived from the full kiosk username.
|
||||||
|
ACL: allowed to access topics of their base device (suffix stripped).
|
||||||
|
- admin, bonamin, NodeRED, and other non-device users:
|
||||||
|
These connect via the passwd file backend (go-auth file backend).
|
||||||
|
They never reach this HTTP backend — go-auth resolves them first.
|
||||||
|
The ACL endpoint below handles them defensively anyway (superuser list).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import hmac
|
||||||
|
import hashlib
|
||||||
|
from fastapi import APIRouter, Form, Response
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/mqtt/auth", tags=["mqtt-auth"])
|
||||||
|
|
||||||
|
LEGACY_PASSWORD = "vesper"
|
||||||
|
|
||||||
|
# Users authenticated via passwd file (go-auth file backend).
|
||||||
|
# If they somehow reach the HTTP ACL endpoint, grant full access.
|
||||||
|
SUPERUSERS = {"admin", "bonamin", "NodeRED"}
|
||||||
|
|
||||||
|
|
||||||
|
def _derive_password(username: str) -> str:
|
||||||
|
"""Derive the expected MQTT password for a given username."""
|
||||||
|
return hmac.new(
|
||||||
|
settings.mqtt_secret.encode(),
|
||||||
|
username.encode(),
|
||||||
|
hashlib.sha256,
|
||||||
|
).hexdigest()[:32]
|
||||||
|
|
||||||
|
|
||||||
|
def _is_valid_password(username: str, password: str) -> bool:
|
||||||
|
"""
|
||||||
|
Accept the password if it matches either:
|
||||||
|
- The HMAC-derived password (new firmware)
|
||||||
|
- The legacy hardcoded "vesper" password (old firmware, transition period)
|
||||||
|
|
||||||
|
Remove the legacy check in Stage 7 once all devices are on new firmware.
|
||||||
|
"""
|
||||||
|
expected = _derive_password(username)
|
||||||
|
hmac_ok = hmac.compare_digest(expected, password)
|
||||||
|
legacy_ok = hmac.compare_digest(LEGACY_PASSWORD, password)
|
||||||
|
return hmac_ok or legacy_ok
|
||||||
|
|
||||||
|
|
||||||
|
def _base_sn(username: str) -> str:
|
||||||
|
"""
|
||||||
|
Strip the -kiosk suffix if present, returning the base serial number.
|
||||||
|
e.g. "PV25L22BP01R01-kiosk" -> "PV25L22BP01R01"
|
||||||
|
"PV25L22BP01R01" -> "PV25L22BP01R01"
|
||||||
|
"""
|
||||||
|
if username.endswith("-kiosk"):
|
||||||
|
return username[: -len("-kiosk")]
|
||||||
|
return username
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/user")
|
||||||
|
async def mqtt_auth_user(
|
||||||
|
username: str = Form(...),
|
||||||
|
password: str = Form(...),
|
||||||
|
clientid: str = Form(default=""),
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Called by Mosquitto on every CONNECT.
|
||||||
|
Returns 200 to allow, 403 to deny.
|
||||||
|
|
||||||
|
Username = device SN (new format: "PV-26A18-BC02R-X7KQA", old format: "PV25L22BP01R01")
|
||||||
|
or kiosk variant: "PV25L22BP01R01-kiosk"
|
||||||
|
Password = HMAC-derived (new firmware) or "vesper" (legacy firmware)
|
||||||
|
|
||||||
|
Note: admin, bonamin and NodeRED authenticate via the go-auth passwd file backend
|
||||||
|
and never reach this endpoint.
|
||||||
|
"""
|
||||||
|
if _is_valid_password(username, password):
|
||||||
|
return Response(status_code=200)
|
||||||
|
|
||||||
|
return Response(status_code=403)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/acl")
|
||||||
|
async def mqtt_auth_acl(
|
||||||
|
username: str = Form(...),
|
||||||
|
topic: str = Form(...),
|
||||||
|
clientid: str = Form(default=""),
|
||||||
|
acc: int = Form(...), # 1 = subscribe, 2 = publish, 3 = subscribe+publish
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Called by Mosquitto on every SUBSCRIBE and PUBLISH.
|
||||||
|
Returns 200 to allow, 403 to deny.
|
||||||
|
|
||||||
|
Topic pattern: vesper/{sn}/...
|
||||||
|
- Device users: may only access their own SN segment
|
||||||
|
- Kiosk users: stripped of -kiosk suffix, then same rule applies
|
||||||
|
- Superusers (bonamin, NodeRED): full access
|
||||||
|
"""
|
||||||
|
# Superusers get full access (shouldn't reach here but handled defensively)
|
||||||
|
if username in SUPERUSERS:
|
||||||
|
return Response(status_code=200)
|
||||||
|
|
||||||
|
# Derive the base SN (handles -kiosk suffix)
|
||||||
|
base = _base_sn(username)
|
||||||
|
|
||||||
|
# Topic must be vesper/{base_sn}/...
|
||||||
|
parts = topic.split("/")
|
||||||
|
if len(parts) >= 2 and parts[0] == "vesper" and parts[1] == base:
|
||||||
|
return Response(status_code=200)
|
||||||
|
|
||||||
|
return Response(status_code=403)
|
||||||
@@ -26,7 +26,7 @@ class MqttManager:
|
|||||||
|
|
||||||
self._client = paho_mqtt.Client(
|
self._client = paho_mqtt.Client(
|
||||||
callback_api_version=paho_mqtt.CallbackAPIVersion.VERSION2,
|
callback_api_version=paho_mqtt.CallbackAPIVersion.VERSION2,
|
||||||
client_id="bellsystems-admin-panel",
|
client_id=settings.mqtt_client_id,
|
||||||
clean_session=True,
|
clean_session=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -64,6 +64,8 @@ class MqttManager:
|
|||||||
client.subscribe([
|
client.subscribe([
|
||||||
("vesper/+/data", 1),
|
("vesper/+/data", 1),
|
||||||
("vesper/+/status/heartbeat", 1),
|
("vesper/+/status/heartbeat", 1),
|
||||||
|
("vesper/+/status/alerts", 1),
|
||||||
|
("vesper/+/status/info", 0),
|
||||||
("vesper/+/logs", 1),
|
("vesper/+/logs", 1),
|
||||||
])
|
])
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -1,243 +0,0 @@
|
|||||||
import aiosqlite
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
from datetime import datetime, timedelta, timezone
|
|
||||||
from config import settings
|
|
||||||
|
|
||||||
logger = logging.getLogger("mqtt.database")
|
|
||||||
|
|
||||||
_db: aiosqlite.Connection | None = None
|
|
||||||
|
|
||||||
SCHEMA_STATEMENTS = [
|
|
||||||
"""CREATE TABLE IF NOT EXISTS device_logs (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
device_serial TEXT NOT NULL,
|
|
||||||
level TEXT NOT NULL,
|
|
||||||
message TEXT NOT NULL,
|
|
||||||
device_timestamp INTEGER,
|
|
||||||
received_at TEXT NOT NULL DEFAULT (datetime('now'))
|
|
||||||
)""",
|
|
||||||
"""CREATE TABLE IF NOT EXISTS heartbeats (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
device_serial TEXT NOT NULL,
|
|
||||||
device_id TEXT,
|
|
||||||
firmware_version TEXT,
|
|
||||||
ip_address TEXT,
|
|
||||||
gateway TEXT,
|
|
||||||
uptime_ms INTEGER,
|
|
||||||
uptime_display TEXT,
|
|
||||||
received_at TEXT NOT NULL DEFAULT (datetime('now'))
|
|
||||||
)""",
|
|
||||||
"""CREATE TABLE IF NOT EXISTS commands (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
device_serial TEXT NOT NULL,
|
|
||||||
command_name TEXT NOT NULL,
|
|
||||||
command_payload TEXT,
|
|
||||||
status TEXT NOT NULL DEFAULT 'pending',
|
|
||||||
response_payload TEXT,
|
|
||||||
sent_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
||||||
responded_at TEXT
|
|
||||||
)""",
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_logs_serial_time ON device_logs(device_serial, received_at)",
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_logs_level ON device_logs(level)",
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_heartbeats_serial_time ON heartbeats(device_serial, received_at)",
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_commands_serial_time ON commands(device_serial, sent_at)",
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_commands_status ON commands(status)",
|
|
||||||
# Melody drafts table
|
|
||||||
"""CREATE TABLE IF NOT EXISTS melody_drafts (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
status TEXT NOT NULL DEFAULT 'draft',
|
|
||||||
data TEXT NOT NULL,
|
|
||||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
||||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
|
||||||
)""",
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_melody_drafts_status ON melody_drafts(status)",
|
|
||||||
# Built melodies table (local melody builder)
|
|
||||||
"""CREATE TABLE IF NOT EXISTS built_melodies (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
pid TEXT NOT NULL,
|
|
||||||
steps TEXT NOT NULL,
|
|
||||||
binary_path TEXT,
|
|
||||||
progmem_code TEXT,
|
|
||||||
assigned_melody_ids TEXT NOT NULL DEFAULT '[]',
|
|
||||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
||||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
|
||||||
)""",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
async def init_db():
|
|
||||||
global _db
|
|
||||||
_db = await aiosqlite.connect(settings.sqlite_db_path)
|
|
||||||
_db.row_factory = aiosqlite.Row
|
|
||||||
for stmt in SCHEMA_STATEMENTS:
|
|
||||||
await _db.execute(stmt)
|
|
||||||
await _db.commit()
|
|
||||||
logger.info(f"SQLite database initialized at {settings.sqlite_db_path}")
|
|
||||||
|
|
||||||
|
|
||||||
async def close_db():
|
|
||||||
global _db
|
|
||||||
if _db:
|
|
||||||
await _db.close()
|
|
||||||
_db = None
|
|
||||||
|
|
||||||
|
|
||||||
async def get_db() -> aiosqlite.Connection:
|
|
||||||
if _db is None:
|
|
||||||
await init_db()
|
|
||||||
return _db
|
|
||||||
|
|
||||||
|
|
||||||
# --- Insert Operations ---
|
|
||||||
|
|
||||||
async def insert_log(device_serial: str, level: str, message: str,
|
|
||||||
device_timestamp: int | None = None):
|
|
||||||
db = await get_db()
|
|
||||||
cursor = await db.execute(
|
|
||||||
"INSERT INTO device_logs (device_serial, level, message, device_timestamp) VALUES (?, ?, ?, ?)",
|
|
||||||
(device_serial, level, message, device_timestamp)
|
|
||||||
)
|
|
||||||
await db.commit()
|
|
||||||
return cursor.lastrowid
|
|
||||||
|
|
||||||
|
|
||||||
async def insert_heartbeat(device_serial: str, device_id: str,
|
|
||||||
firmware_version: str, ip_address: str,
|
|
||||||
gateway: str, uptime_ms: int, uptime_display: str):
|
|
||||||
db = await get_db()
|
|
||||||
cursor = await db.execute(
|
|
||||||
"""INSERT INTO heartbeats
|
|
||||||
(device_serial, device_id, firmware_version, ip_address, gateway, uptime_ms, uptime_display)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?)""",
|
|
||||||
(device_serial, device_id, firmware_version, ip_address, gateway, uptime_ms, uptime_display)
|
|
||||||
)
|
|
||||||
await db.commit()
|
|
||||||
return cursor.lastrowid
|
|
||||||
|
|
||||||
|
|
||||||
async def insert_command(device_serial: str, command_name: str,
|
|
||||||
command_payload: dict) -> int:
|
|
||||||
db = await get_db()
|
|
||||||
cursor = await db.execute(
|
|
||||||
"INSERT INTO commands (device_serial, command_name, command_payload) VALUES (?, ?, ?)",
|
|
||||||
(device_serial, command_name, json.dumps(command_payload))
|
|
||||||
)
|
|
||||||
await db.commit()
|
|
||||||
return cursor.lastrowid
|
|
||||||
|
|
||||||
|
|
||||||
async def update_command_response(command_id: int, status: str,
|
|
||||||
response_payload: dict | None = None):
|
|
||||||
db = await get_db()
|
|
||||||
await db.execute(
|
|
||||||
"""UPDATE commands SET status = ?, response_payload = ?,
|
|
||||||
responded_at = datetime('now') WHERE id = ?""",
|
|
||||||
(status, json.dumps(response_payload) if response_payload else None, command_id)
|
|
||||||
)
|
|
||||||
await db.commit()
|
|
||||||
|
|
||||||
|
|
||||||
# --- Query Operations ---
|
|
||||||
|
|
||||||
async def get_logs(device_serial: str, level: str | None = None,
|
|
||||||
search: str | None = None,
|
|
||||||
limit: int = 100, offset: int = 0) -> tuple[list, int]:
|
|
||||||
db = await get_db()
|
|
||||||
where_clauses = ["device_serial = ?"]
|
|
||||||
params: list = [device_serial]
|
|
||||||
|
|
||||||
if level:
|
|
||||||
where_clauses.append("level = ?")
|
|
||||||
params.append(level)
|
|
||||||
if search:
|
|
||||||
where_clauses.append("message LIKE ?")
|
|
||||||
params.append(f"%{search}%")
|
|
||||||
|
|
||||||
where = " AND ".join(where_clauses)
|
|
||||||
|
|
||||||
count_row = await db.execute_fetchall(
|
|
||||||
f"SELECT COUNT(*) as cnt FROM device_logs WHERE {where}", params
|
|
||||||
)
|
|
||||||
total = count_row[0][0]
|
|
||||||
|
|
||||||
rows = await db.execute_fetchall(
|
|
||||||
f"SELECT * FROM device_logs WHERE {where} ORDER BY received_at DESC LIMIT ? OFFSET ?",
|
|
||||||
params + [limit, offset]
|
|
||||||
)
|
|
||||||
return [dict(r) for r in rows], total
|
|
||||||
|
|
||||||
|
|
||||||
async def get_heartbeats(device_serial: str, limit: int = 100,
|
|
||||||
offset: int = 0) -> tuple[list, int]:
|
|
||||||
db = await get_db()
|
|
||||||
count_row = await db.execute_fetchall(
|
|
||||||
"SELECT COUNT(*) FROM heartbeats WHERE device_serial = ?", (device_serial,)
|
|
||||||
)
|
|
||||||
total = count_row[0][0]
|
|
||||||
rows = await db.execute_fetchall(
|
|
||||||
"SELECT * FROM heartbeats WHERE device_serial = ? ORDER BY received_at DESC LIMIT ? OFFSET ?",
|
|
||||||
(device_serial, limit, offset)
|
|
||||||
)
|
|
||||||
return [dict(r) for r in rows], total
|
|
||||||
|
|
||||||
|
|
||||||
async def get_commands(device_serial: str, limit: int = 100,
|
|
||||||
offset: int = 0) -> tuple[list, int]:
|
|
||||||
db = await get_db()
|
|
||||||
count_row = await db.execute_fetchall(
|
|
||||||
"SELECT COUNT(*) FROM commands WHERE device_serial = ?", (device_serial,)
|
|
||||||
)
|
|
||||||
total = count_row[0][0]
|
|
||||||
rows = await db.execute_fetchall(
|
|
||||||
"SELECT * FROM commands WHERE device_serial = ? ORDER BY sent_at DESC LIMIT ? OFFSET ?",
|
|
||||||
(device_serial, limit, offset)
|
|
||||||
)
|
|
||||||
return [dict(r) for r in rows], total
|
|
||||||
|
|
||||||
|
|
||||||
async def get_latest_heartbeats() -> list:
|
|
||||||
db = await get_db()
|
|
||||||
rows = await db.execute_fetchall("""
|
|
||||||
SELECT h.* FROM heartbeats h
|
|
||||||
INNER JOIN (
|
|
||||||
SELECT device_serial, MAX(received_at) as max_time
|
|
||||||
FROM heartbeats GROUP BY device_serial
|
|
||||||
) latest ON h.device_serial = latest.device_serial
|
|
||||||
AND h.received_at = latest.max_time
|
|
||||||
""")
|
|
||||||
return [dict(r) for r in rows]
|
|
||||||
|
|
||||||
|
|
||||||
async def get_pending_command(device_serial: str) -> dict | None:
|
|
||||||
db = await get_db()
|
|
||||||
rows = await db.execute_fetchall(
|
|
||||||
"""SELECT * FROM commands WHERE device_serial = ? AND status = 'pending'
|
|
||||||
ORDER BY sent_at DESC LIMIT 1""",
|
|
||||||
(device_serial,)
|
|
||||||
)
|
|
||||||
return dict(rows[0]) if rows else None
|
|
||||||
|
|
||||||
|
|
||||||
# --- Cleanup ---
|
|
||||||
|
|
||||||
async def purge_old_data(retention_days: int | None = None):
|
|
||||||
days = retention_days or settings.mqtt_data_retention_days
|
|
||||||
cutoff = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
|
|
||||||
db = await get_db()
|
|
||||||
await db.execute("DELETE FROM device_logs WHERE received_at < ?", (cutoff,))
|
|
||||||
await db.execute("DELETE FROM heartbeats WHERE received_at < ?", (cutoff,))
|
|
||||||
await db.execute("DELETE FROM commands WHERE sent_at < ?", (cutoff,))
|
|
||||||
await db.commit()
|
|
||||||
logger.info(f"Purged MQTT data older than {days} days")
|
|
||||||
|
|
||||||
|
|
||||||
async def purge_loop():
|
|
||||||
while True:
|
|
||||||
await asyncio.sleep(86400)
|
|
||||||
try:
|
|
||||||
await purge_old_data()
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Purge failed: {e}")
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import logging
|
import logging
|
||||||
from mqtt import database as db
|
import database as db
|
||||||
|
|
||||||
logger = logging.getLogger("mqtt.logger")
|
logger = logging.getLogger("mqtt.logger")
|
||||||
|
|
||||||
@@ -18,6 +18,10 @@ async def handle_message(serial: str, topic_type: str, payload: dict):
|
|||||||
try:
|
try:
|
||||||
if topic_type == "status/heartbeat":
|
if topic_type == "status/heartbeat":
|
||||||
await _handle_heartbeat(serial, payload)
|
await _handle_heartbeat(serial, payload)
|
||||||
|
elif topic_type == "status/alerts":
|
||||||
|
await _handle_alerts(serial, payload)
|
||||||
|
elif topic_type == "status/info":
|
||||||
|
await _handle_info(serial, payload)
|
||||||
elif topic_type == "logs":
|
elif topic_type == "logs":
|
||||||
await _handle_log(serial, payload)
|
await _handle_log(serial, payload)
|
||||||
elif topic_type == "data":
|
elif topic_type == "data":
|
||||||
@@ -29,6 +33,8 @@ async def handle_message(serial: str, topic_type: str, payload: dict):
|
|||||||
|
|
||||||
|
|
||||||
async def _handle_heartbeat(serial: str, payload: dict):
|
async def _handle_heartbeat(serial: str, payload: dict):
|
||||||
|
# Store silently — do not log as a visible event.
|
||||||
|
# The console surfaces an alert only when the device goes silent (no heartbeat for 90s).
|
||||||
inner = payload.get("payload", {})
|
inner = payload.get("payload", {})
|
||||||
await db.insert_heartbeat(
|
await db.insert_heartbeat(
|
||||||
device_serial=serial,
|
device_serial=serial,
|
||||||
@@ -55,6 +61,31 @@ async def _handle_log(serial: str, payload: dict):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def _handle_alerts(serial: str, payload: dict):
|
||||||
|
subsystem = payload.get("subsystem", "")
|
||||||
|
state = payload.get("state", "")
|
||||||
|
if not subsystem or not state:
|
||||||
|
logger.warning(f"Malformed alert payload from {serial}: {payload}")
|
||||||
|
return
|
||||||
|
|
||||||
|
if state == "CLEARED":
|
||||||
|
await db.delete_alert(serial, subsystem)
|
||||||
|
else:
|
||||||
|
await db.upsert_alert(serial, subsystem, state, payload.get("msg"))
|
||||||
|
|
||||||
|
|
||||||
|
async def _handle_info(serial: str, payload: dict):
|
||||||
|
event_type = payload.get("type", "")
|
||||||
|
data = payload.get("payload", {})
|
||||||
|
|
||||||
|
if event_type == "playback_started":
|
||||||
|
logger.debug(f"{serial}: playback started — melody_uid={data.get('melody_uid')}")
|
||||||
|
elif event_type == "playback_stopped":
|
||||||
|
logger.debug(f"{serial}: playback stopped")
|
||||||
|
else:
|
||||||
|
logger.debug(f"{serial}: info event '{event_type}'")
|
||||||
|
|
||||||
|
|
||||||
async def _handle_data_response(serial: str, payload: dict):
|
async def _handle_data_response(serial: str, payload: dict):
|
||||||
status = payload.get("status", "")
|
status = payload.get("status", "")
|
||||||
|
|
||||||
|
|||||||
@@ -84,3 +84,15 @@ class CommandSendResponse(BaseModel):
|
|||||||
success: bool
|
success: bool
|
||||||
command_id: int
|
command_id: int
|
||||||
message: str
|
message: str
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceAlertEntry(BaseModel):
|
||||||
|
device_serial: str
|
||||||
|
subsystem: str
|
||||||
|
state: str
|
||||||
|
message: Optional[str] = None
|
||||||
|
updated_at: str
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceAlertsResponse(BaseModel):
|
||||||
|
alerts: List[DeviceAlertEntry]
|
||||||
|
|||||||
@@ -1,52 +1,17 @@
|
|||||||
import subprocess
|
"""
|
||||||
import os
|
mqtt/mosquitto.py — no-ops since Stage 5.
|
||||||
from config import settings
|
|
||||||
|
Auth is now HMAC-based via the go-auth HTTP plugin.
|
||||||
|
These functions are kept as no-ops so existing call sites don't break.
|
||||||
|
They can be removed entirely in Phase 6 cleanup.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
def register_device_password(serial_number: str, password: str) -> bool:
|
def register_device_password(serial_number: str, password: str) -> bool:
|
||||||
"""Register a device in the Mosquitto password file.
|
"""No-op. HMAC auth is derived on demand — no registration needed."""
|
||||||
|
return True
|
||||||
Uses mosquitto_passwd to add/update the device credentials.
|
|
||||||
The serial number is used as the MQTT username.
|
|
||||||
Returns True on success, False on failure.
|
|
||||||
"""
|
|
||||||
passwd_file = settings.mosquitto_password_file
|
|
||||||
|
|
||||||
# Ensure the password file exists
|
|
||||||
if not os.path.exists(passwd_file):
|
|
||||||
# Create the file if it doesn't exist
|
|
||||||
os.makedirs(os.path.dirname(passwd_file), exist_ok=True)
|
|
||||||
open(passwd_file, "a").close()
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Use mosquitto_passwd with -b flag (batch mode) to set password
|
|
||||||
result = subprocess.run(
|
|
||||||
["mosquitto_passwd", "-b", passwd_file, serial_number, password],
|
|
||||||
capture_output=True,
|
|
||||||
text=True,
|
|
||||||
timeout=10,
|
|
||||||
)
|
|
||||||
return result.returncode == 0
|
|
||||||
except (subprocess.TimeoutExpired, FileNotFoundError) as e:
|
|
||||||
print(f"[WARNING] Mosquitto password registration failed: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def remove_device_password(serial_number: str) -> bool:
|
def remove_device_password(serial_number: str) -> bool:
|
||||||
"""Remove a device from the Mosquitto password file."""
|
"""No-op. HMAC auth is derived on demand — no removal needed."""
|
||||||
passwd_file = settings.mosquitto_password_file
|
return True
|
||||||
|
|
||||||
if not os.path.exists(passwd_file):
|
|
||||||
return True
|
|
||||||
|
|
||||||
try:
|
|
||||||
result = subprocess.run(
|
|
||||||
["mosquitto_passwd", "-D", passwd_file, serial_number],
|
|
||||||
capture_output=True,
|
|
||||||
text=True,
|
|
||||||
timeout=10,
|
|
||||||
)
|
|
||||||
return result.returncode == 0
|
|
||||||
except (subprocess.TimeoutExpired, FileNotFoundError) as e:
|
|
||||||
print(f"[WARNING] Mosquitto password removal failed: {e}")
|
|
||||||
return False
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ from mqtt.models import (
|
|||||||
CommandListResponse, HeartbeatEntry,
|
CommandListResponse, HeartbeatEntry,
|
||||||
)
|
)
|
||||||
from mqtt.client import mqtt_manager
|
from mqtt.client import mqtt_manager
|
||||||
from mqtt import database as db
|
import database as db
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/mqtt", tags=["mqtt"])
|
router = APIRouter(prefix="/api/mqtt", tags=["mqtt"])
|
||||||
|
|||||||
0
backend/public/__init__.py
Normal file
214
backend/public/router.py
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
"""
|
||||||
|
Public (no-auth) endpoints for CloudFlash and feature gate checks.
|
||||||
|
"""
|
||||||
|
from fastapi import APIRouter, HTTPException
|
||||||
|
from fastapi.responses import Response
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from settings.public_features_service import get_public_features
|
||||||
|
from firmware.service import list_firmware
|
||||||
|
from utils.nvs_generator import generate as generate_nvs
|
||||||
|
from manufacturing.service import get_device_by_sn
|
||||||
|
from shared.exceptions import NotFoundError
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/public", tags=["public"])
|
||||||
|
|
||||||
|
|
||||||
|
# ── Feature gate ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class CloudFlashStatus(BaseModel):
|
||||||
|
enabled: bool
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/cloudflash/status", response_model=CloudFlashStatus)
|
||||||
|
async def cloudflash_status():
|
||||||
|
"""Returns whether the CloudFlash public page is currently enabled."""
|
||||||
|
settings = get_public_features()
|
||||||
|
return CloudFlashStatus(enabled=settings.cloudflash_enabled)
|
||||||
|
|
||||||
|
|
||||||
|
def _require_cloudflash_enabled():
|
||||||
|
"""Raises 403 if CloudFlash is disabled."""
|
||||||
|
settings = get_public_features()
|
||||||
|
if not settings.cloudflash_enabled:
|
||||||
|
raise HTTPException(status_code=403, detail="CloudFlash is currently disabled.")
|
||||||
|
|
||||||
|
|
||||||
|
# ── Public firmware list ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class PublicFirmwareOption(BaseModel):
|
||||||
|
hw_type: str
|
||||||
|
hw_type_label: str
|
||||||
|
channel: str
|
||||||
|
version: str
|
||||||
|
download_url: str
|
||||||
|
|
||||||
|
|
||||||
|
HW_TYPE_LABELS = {
|
||||||
|
"vesper": "Vesper",
|
||||||
|
"vesper_plus": "Vesper Plus",
|
||||||
|
"vesper_pro": "Vesper Pro",
|
||||||
|
"agnus": "Agnus",
|
||||||
|
"agnus_mini": "Agnus Mini",
|
||||||
|
"chronos": "Chronos",
|
||||||
|
"chronos_pro": "Chronos Pro",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/cloudflash/firmware", response_model=List[PublicFirmwareOption])
|
||||||
|
async def list_public_firmware():
|
||||||
|
"""
|
||||||
|
Returns all available firmware options (is_latest=True, non-bespoke, stable channel only).
|
||||||
|
No authentication required — used by the public CloudFlash page.
|
||||||
|
"""
|
||||||
|
_require_cloudflash_enabled()
|
||||||
|
|
||||||
|
all_fw = list_firmware()
|
||||||
|
options = []
|
||||||
|
for fw in all_fw:
|
||||||
|
if not fw.is_latest:
|
||||||
|
continue
|
||||||
|
if fw.hw_type == "bespoke":
|
||||||
|
continue
|
||||||
|
if fw.channel != "stable":
|
||||||
|
continue
|
||||||
|
options.append(PublicFirmwareOption(
|
||||||
|
hw_type=fw.hw_type,
|
||||||
|
hw_type_label=HW_TYPE_LABELS.get(fw.hw_type, fw.hw_type.replace("_", " ").title()),
|
||||||
|
channel=fw.channel,
|
||||||
|
version=fw.version,
|
||||||
|
download_url=f"/api/firmware/{fw.hw_type}/{fw.channel}/{fw.version}/firmware.bin",
|
||||||
|
))
|
||||||
|
|
||||||
|
# Sort by hw_type label
|
||||||
|
options.sort(key=lambda x: x.hw_type_label)
|
||||||
|
return options
|
||||||
|
|
||||||
|
|
||||||
|
# ── Public serial number validation ──────────────────────────────────────────
|
||||||
|
|
||||||
|
class SerialValidationResult(BaseModel):
|
||||||
|
valid: bool
|
||||||
|
hw_type: Optional[str] = None
|
||||||
|
hw_type_label: Optional[str] = None
|
||||||
|
hw_version: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/cloudflash/validate-serial/{serial_number}", response_model=SerialValidationResult)
|
||||||
|
async def validate_serial(serial_number: str):
|
||||||
|
"""
|
||||||
|
Check whether a serial number exists in the device database.
|
||||||
|
Returns hw_type info if found so the frontend can confirm it matches the user's selection.
|
||||||
|
No sensitive device data is returned.
|
||||||
|
"""
|
||||||
|
_require_cloudflash_enabled()
|
||||||
|
|
||||||
|
sn = serial_number.strip().upper()
|
||||||
|
try:
|
||||||
|
device = get_device_by_sn(sn)
|
||||||
|
return SerialValidationResult(
|
||||||
|
valid=True,
|
||||||
|
hw_type=device.hw_type,
|
||||||
|
hw_type_label=HW_TYPE_LABELS.get(device.hw_type, device.hw_type.replace("_", " ").title()),
|
||||||
|
hw_version=device.hw_version,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
return SerialValidationResult(valid=False)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Public NVS generation ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class NvsRequest(BaseModel):
|
||||||
|
serial_number: str
|
||||||
|
hw_type: str
|
||||||
|
hw_revision: str
|
||||||
|
nvs_schema: str = "new" # "legacy" | "new"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def legacy(self) -> bool:
|
||||||
|
return self.nvs_schema == "legacy"
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/cloudflash/nvs.bin")
|
||||||
|
async def generate_public_nvs(body: NvsRequest):
|
||||||
|
"""
|
||||||
|
Generate an NVS binary for a given serial number + hardware info.
|
||||||
|
No authentication required — used by the public CloudFlash page for Full Wipe flash.
|
||||||
|
The serial number is provided by the user (they read it from the sticker on their device).
|
||||||
|
"""
|
||||||
|
_require_cloudflash_enabled()
|
||||||
|
|
||||||
|
sn = body.serial_number.strip().upper()
|
||||||
|
if not sn:
|
||||||
|
raise HTTPException(status_code=422, detail="Serial number is required.")
|
||||||
|
|
||||||
|
hw_type = body.hw_type.strip().lower()
|
||||||
|
hw_revision = body.hw_revision.strip()
|
||||||
|
|
||||||
|
if not hw_type or not hw_revision:
|
||||||
|
raise HTTPException(status_code=422, detail="hw_type and hw_revision are required.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
nvs_bytes = generate_nvs(
|
||||||
|
serial_number=sn,
|
||||||
|
hw_family=hw_type,
|
||||||
|
hw_revision=hw_revision,
|
||||||
|
legacy=body.legacy,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
raise HTTPException(status_code=500, detail=f"NVS generation failed: {str(e)}")
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
content=nvs_bytes,
|
||||||
|
media_type="application/octet-stream",
|
||||||
|
headers={"Content-Disposition": f'attachment; filename="{sn}_nvs.bin"'},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Public flash assets (bootloader + partitions) ─────────────────────────────
|
||||||
|
|
||||||
|
@router.get("/cloudflash/{hw_type}/bootloader.bin")
|
||||||
|
async def get_public_bootloader(hw_type: str):
|
||||||
|
"""
|
||||||
|
Serve the bootloader binary for a given hw_type.
|
||||||
|
No authentication required — used by the public CloudFlash page.
|
||||||
|
"""
|
||||||
|
_require_cloudflash_enabled()
|
||||||
|
|
||||||
|
import os
|
||||||
|
from config import settings as cfg
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
asset_path = Path(cfg.flash_assets_storage_path) / hw_type / "bootloader.bin"
|
||||||
|
if not asset_path.exists():
|
||||||
|
raise HTTPException(status_code=404, detail=f"Bootloader not found for {hw_type}.")
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
content=asset_path.read_bytes(),
|
||||||
|
media_type="application/octet-stream",
|
||||||
|
headers={"Content-Disposition": f'attachment; filename="bootloader_{hw_type}.bin"'},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/cloudflash/{hw_type}/partitions.bin")
|
||||||
|
async def get_public_partitions(hw_type: str):
|
||||||
|
"""
|
||||||
|
Serve the partition table binary for a given hw_type.
|
||||||
|
No authentication required — used by the public CloudFlash page.
|
||||||
|
"""
|
||||||
|
_require_cloudflash_enabled()
|
||||||
|
|
||||||
|
import os
|
||||||
|
from config import settings as cfg
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
asset_path = Path(cfg.flash_assets_storage_path) / hw_type / "partitions.bin"
|
||||||
|
if not asset_path.exists():
|
||||||
|
raise HTTPException(status_code=404, detail=f"Partition table not found for {hw_type}.")
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
content=asset_path.read_bytes(),
|
||||||
|
media_type="application/octet-stream",
|
||||||
|
headers={"Content-Disposition": f'attachment; filename="partitions_{hw_type}.bin"'},
|
||||||
|
)
|
||||||
@@ -8,4 +8,10 @@ python-jose[cryptography]==3.3.0
|
|||||||
passlib[bcrypt]==1.7.4
|
passlib[bcrypt]==1.7.4
|
||||||
python-multipart==0.0.20
|
python-multipart==0.0.20
|
||||||
bcrypt==4.0.1
|
bcrypt==4.0.1
|
||||||
aiosqlite==0.20.0
|
aiosqlite==0.20.0
|
||||||
|
resend==2.10.0
|
||||||
|
httpx>=0.27.0
|
||||||
|
weasyprint>=62.0
|
||||||
|
jinja2>=3.1.0
|
||||||
|
Pillow>=10.0.0
|
||||||
|
pdf2image>=1.17.0
|
||||||
@@ -1,11 +1,19 @@
|
|||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from typing import List, Optional
|
from typing import List, Optional
|
||||||
|
|
||||||
|
DEFAULT_NOTE_ASSIGNMENT_COLORS: List[str] = [
|
||||||
|
"#67E8F9", "#5EEAD4", "#6EE7B7", "#86EFAC",
|
||||||
|
"#BEF264", "#FDE68A", "#FCD34D", "#FBBF24",
|
||||||
|
"#FDBA74", "#FB923C", "#F97316", "#FB7185",
|
||||||
|
"#F87171", "#EF4444", "#DC2626", "#B91C1C",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class MelodySettings(BaseModel):
|
class MelodySettings(BaseModel):
|
||||||
available_languages: List[str] = ["en", "el", "sr"]
|
available_languages: List[str] = ["en", "el", "sr"]
|
||||||
primary_language: str = "en"
|
primary_language: str = "en"
|
||||||
quick_colors: List[str] = ["#FF5733", "#33FF57", "#3357FF", "#FFD700", "#FF69B4", "#8B4513"]
|
quick_colors: List[str] = ["#FF5733", "#33FF57", "#3357FF", "#FFD700", "#FF69B4", "#8B4513"]
|
||||||
|
note_assignment_colors: List[str] = DEFAULT_NOTE_ASSIGNMENT_COLORS
|
||||||
duration_values: List[int] = [
|
duration_values: List[int] = [
|
||||||
0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180,
|
0, 15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180,
|
||||||
240, 300, 360, 420, 480, 540, 600, 900,
|
240, 300, 360, 420, 480, 540, 600, 900,
|
||||||
@@ -16,4 +24,5 @@ class MelodySettingsUpdate(BaseModel):
|
|||||||
available_languages: Optional[List[str]] = None
|
available_languages: Optional[List[str]] = None
|
||||||
primary_language: Optional[str] = None
|
primary_language: Optional[str] = None
|
||||||
quick_colors: Optional[List[str]] = None
|
quick_colors: Optional[List[str]] = None
|
||||||
|
note_assignment_colors: Optional[List[str]] = None
|
||||||
duration_values: Optional[List[int]] = None
|
duration_values: Optional[List[int]] = None
|
||||||
|
|||||||
10
backend/settings/public_features_models.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
class PublicFeaturesSettings(BaseModel):
|
||||||
|
cloudflash_enabled: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
class PublicFeaturesSettingsUpdate(BaseModel):
|
||||||
|
cloudflash_enabled: Optional[bool] = None
|
||||||
31
backend/settings/public_features_service.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
from shared.firebase import get_db
|
||||||
|
from settings.public_features_models import PublicFeaturesSettings, PublicFeaturesSettingsUpdate
|
||||||
|
|
||||||
|
COLLECTION = "admin_settings"
|
||||||
|
DOC_ID = "public_features"
|
||||||
|
|
||||||
|
|
||||||
|
def get_public_features() -> PublicFeaturesSettings:
|
||||||
|
"""Get public features settings from Firestore. Creates defaults if not found."""
|
||||||
|
db = get_db()
|
||||||
|
doc = db.collection(COLLECTION).document(DOC_ID).get()
|
||||||
|
if doc.exists:
|
||||||
|
return PublicFeaturesSettings(**doc.to_dict())
|
||||||
|
defaults = PublicFeaturesSettings()
|
||||||
|
db.collection(COLLECTION).document(DOC_ID).set(defaults.model_dump())
|
||||||
|
return defaults
|
||||||
|
|
||||||
|
|
||||||
|
def update_public_features(data: PublicFeaturesSettingsUpdate) -> PublicFeaturesSettings:
|
||||||
|
"""Update public features settings. Only provided fields are updated."""
|
||||||
|
db = get_db()
|
||||||
|
doc_ref = db.collection(COLLECTION).document(DOC_ID)
|
||||||
|
doc = doc_ref.get()
|
||||||
|
|
||||||
|
existing = doc.to_dict() if doc.exists else PublicFeaturesSettings().model_dump()
|
||||||
|
update_data = data.model_dump(exclude_none=True)
|
||||||
|
existing.update(update_data)
|
||||||
|
|
||||||
|
normalized = PublicFeaturesSettings(**existing)
|
||||||
|
doc_ref.set(normalized.model_dump())
|
||||||
|
return normalized
|
||||||
@@ -1,8 +1,11 @@
|
|||||||
from fastapi import APIRouter, Depends
|
from fastapi import APIRouter, Depends
|
||||||
from auth.models import TokenPayload
|
from auth.models import TokenPayload
|
||||||
from auth.dependencies import require_permission
|
from auth.dependencies import require_permission, require_roles
|
||||||
|
from auth.models import Role
|
||||||
from settings.models import MelodySettings, MelodySettingsUpdate
|
from settings.models import MelodySettings, MelodySettingsUpdate
|
||||||
|
from settings.public_features_models import PublicFeaturesSettings, PublicFeaturesSettingsUpdate
|
||||||
from settings import service
|
from settings import service
|
||||||
|
from settings import public_features_service
|
||||||
|
|
||||||
router = APIRouter(prefix="/api/settings", tags=["settings"])
|
router = APIRouter(prefix="/api/settings", tags=["settings"])
|
||||||
|
|
||||||
@@ -20,3 +23,20 @@ async def update_melody_settings(
|
|||||||
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
||||||
):
|
):
|
||||||
return service.update_melody_settings(body)
|
return service.update_melody_settings(body)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Public Features Settings (sysadmin / admin only) ─────────────────────────
|
||||||
|
|
||||||
|
@router.get("/public-features", response_model=PublicFeaturesSettings)
|
||||||
|
async def get_public_features(
|
||||||
|
_user: TokenPayload = Depends(require_roles(Role.sysadmin, Role.admin)),
|
||||||
|
):
|
||||||
|
return public_features_service.get_public_features()
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/public-features", response_model=PublicFeaturesSettings)
|
||||||
|
async def update_public_features(
|
||||||
|
body: PublicFeaturesSettingsUpdate,
|
||||||
|
_user: TokenPayload = Depends(require_roles(Role.sysadmin, Role.admin)),
|
||||||
|
):
|
||||||
|
return public_features_service.update_public_features(body)
|
||||||
|
|||||||
@@ -10,7 +10,10 @@ def get_melody_settings() -> MelodySettings:
|
|||||||
db = get_db()
|
db = get_db()
|
||||||
doc = db.collection(COLLECTION).document(DOC_ID).get()
|
doc = db.collection(COLLECTION).document(DOC_ID).get()
|
||||||
if doc.exists:
|
if doc.exists:
|
||||||
return MelodySettings(**doc.to_dict())
|
settings = MelodySettings(**doc.to_dict())
|
||||||
|
# Backfill newly introduced defaults into stored settings.
|
||||||
|
db.collection(COLLECTION).document(DOC_ID).set(settings.model_dump())
|
||||||
|
return settings
|
||||||
# Create with defaults
|
# Create with defaults
|
||||||
defaults = MelodySettings()
|
defaults = MelodySettings()
|
||||||
db.collection(COLLECTION).document(DOC_ID).set(defaults.model_dump())
|
db.collection(COLLECTION).document(DOC_ID).set(defaults.model_dump())
|
||||||
@@ -35,5 +38,6 @@ def update_melody_settings(data: MelodySettingsUpdate) -> MelodySettings:
|
|||||||
if "duration_values" in existing:
|
if "duration_values" in existing:
|
||||||
existing["duration_values"] = sorted(existing["duration_values"])
|
existing["duration_values"] = sorted(existing["duration_values"])
|
||||||
|
|
||||||
doc_ref.set(existing)
|
normalized = MelodySettings(**existing)
|
||||||
return MelodySettings(**existing)
|
doc_ref.set(normalized.model_dump())
|
||||||
|
return normalized
|
||||||
|
|||||||
BIN
backend/storage/flash_assets/agnus/bootloader.bin
Normal file
BIN
backend/storage/flash_assets/agnus/partitions.bin
Normal file
|
After Width: | Height: | Size: 21 KiB |
BIN
backend/templates/linktree.png
Normal file
|
After Width: | Height: | Size: 20 KiB |
BIN
backend/templates/logo.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
708
backend/templates/quotation.html
Normal file
@@ -0,0 +1,708 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="{{ lang }}">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8"/>
|
||||||
|
<title>{% if lang == 'gr' %}Προσφορά{% else %}Quotation{% endif %} {{ quotation.quotation_number }}</title>
|
||||||
|
<style>
|
||||||
|
@import url('https://fonts.googleapis.com/css2?family=Noto+Sans:ital,wght@0,400;0,600;0,700;1,400&display=swap');
|
||||||
|
|
||||||
|
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Noto Sans', DejaVu Sans, Arial, sans-serif;
|
||||||
|
font-size: 9.5pt;
|
||||||
|
color: #1a1a2e;
|
||||||
|
background: #fff;
|
||||||
|
line-height: 1.45;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
min-height: 100vh;
|
||||||
|
padding-bottom: 36mm;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* pushes notes + validity down toward the fixed footer */
|
||||||
|
.main-content-gap {
|
||||||
|
flex-grow: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
@page {
|
||||||
|
size: A4;
|
||||||
|
margin: 15mm 15mm 15mm 15mm;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── HEADER ── */
|
||||||
|
.header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: flex-start;
|
||||||
|
padding-bottom: 10px;
|
||||||
|
border-bottom: 2.5px solid #5886c4;
|
||||||
|
margin-bottom: 14px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.company-block {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: flex-start;
|
||||||
|
}
|
||||||
|
.company-block img.logo {
|
||||||
|
max-height: 70px;
|
||||||
|
max-width: 250px;
|
||||||
|
object-fit: contain;
|
||||||
|
display: block;
|
||||||
|
margin-bottom: 5px;
|
||||||
|
}
|
||||||
|
.company-block p {
|
||||||
|
font-size: 10pt;
|
||||||
|
color: #6b8fc4;
|
||||||
|
margin-top: 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.quotation-meta-block {
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
.quotation-meta-block .doc-type {
|
||||||
|
font-size: 14pt;
|
||||||
|
font-weight: 700;
|
||||||
|
color: #5886c4;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 1px;
|
||||||
|
margin-bottom: 4px;
|
||||||
|
}
|
||||||
|
.quotation-meta-block .meta-line {
|
||||||
|
font-size: 8.5pt;
|
||||||
|
text-align: right;
|
||||||
|
white-space: nowrap;
|
||||||
|
line-height: 1.6;
|
||||||
|
}
|
||||||
|
.quotation-meta-block .meta-line .meta-label {
|
||||||
|
color: #7a9cc8;
|
||||||
|
}
|
||||||
|
.quotation-meta-block .meta-line .meta-value {
|
||||||
|
font-weight: 600;
|
||||||
|
color: #1a1a2e;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── CLIENT + ORDER META ── */
|
||||||
|
.info-row {
|
||||||
|
display: flex;
|
||||||
|
align-items: stretch;
|
||||||
|
gap: 16px;
|
||||||
|
margin-bottom: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.client-block, .order-block {
|
||||||
|
border: 1px solid #c2d4ec;
|
||||||
|
border-radius: 5px;
|
||||||
|
padding: 6px 10px;
|
||||||
|
}
|
||||||
|
.client-block { flex: 65; }
|
||||||
|
.order-block { flex: 35; }
|
||||||
|
|
||||||
|
.block-title {
|
||||||
|
font-size: 7.5pt;
|
||||||
|
font-weight: 700;
|
||||||
|
text-transform: uppercase;
|
||||||
|
color: #5886c4;
|
||||||
|
letter-spacing: 0.5px;
|
||||||
|
margin-bottom: 3px;
|
||||||
|
border-bottom: 1px solid #dce9f7;
|
||||||
|
padding-bottom: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.info-row table.fields {
|
||||||
|
border-collapse: collapse;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
.info-row table.fields td {
|
||||||
|
padding: 1px 0;
|
||||||
|
vertical-align: top;
|
||||||
|
}
|
||||||
|
.info-row table.fields td.lbl {
|
||||||
|
font-size: 8pt;
|
||||||
|
color: #7a9cc8;
|
||||||
|
white-space: nowrap;
|
||||||
|
padding-right: 8px;
|
||||||
|
}
|
||||||
|
.info-row table.fields td.val {
|
||||||
|
font-size: 8.5pt;
|
||||||
|
font-weight: 500;
|
||||||
|
color: #1a1a2e;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── TITLE / SUBTITLE ── */
|
||||||
|
.quotation-title {
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
.quotation-title h2 {
|
||||||
|
font-size: 13pt;
|
||||||
|
font-weight: 700;
|
||||||
|
color: #3a6aad;
|
||||||
|
}
|
||||||
|
.quotation-title p {
|
||||||
|
font-size: 9pt;
|
||||||
|
color: #555;
|
||||||
|
margin-top: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── ITEMS TABLE ── */
|
||||||
|
.items-table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
margin-bottom: 0;
|
||||||
|
font-size: 8.5pt;
|
||||||
|
}
|
||||||
|
.items-table thead tr {
|
||||||
|
background: #5886c4;
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
|
.items-table thead th {
|
||||||
|
padding: 6px 8px;
|
||||||
|
text-align: left;
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 8pt;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.3px;
|
||||||
|
}
|
||||||
|
.items-table thead th.right { text-align: right; }
|
||||||
|
.items-table thead th.center { text-align: center; }
|
||||||
|
|
||||||
|
.items-table tbody tr:nth-child(even) { background: #eef4fc; }
|
||||||
|
.items-table tbody tr:nth-child(odd) { background: #fff; }
|
||||||
|
|
||||||
|
.items-table tbody td {
|
||||||
|
padding: 5px 8px;
|
||||||
|
border-bottom: 1px solid #dce9f7;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
.items-table tbody td.right { text-align: right; }
|
||||||
|
.items-table tbody td.center { text-align: center; }
|
||||||
|
.items-table tbody td.muted { color: #7a9cc8; font-size: 8pt; }
|
||||||
|
|
||||||
|
/* Special rows for shipping/install */
|
||||||
|
.items-table tbody tr.special-row td {
|
||||||
|
background: #edf3fb;
|
||||||
|
border-top: 1px solid #c2d4ec;
|
||||||
|
border-bottom: 1px solid #c2d4ec;
|
||||||
|
font-style: italic;
|
||||||
|
color: #3a6aad;
|
||||||
|
}
|
||||||
|
.items-table tbody tr.special-spacer td {
|
||||||
|
height: 6px;
|
||||||
|
background: #f4f8fd;
|
||||||
|
border: none;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── BELOW TABLE ROW: VAT notice + totals ── */
|
||||||
|
.below-table {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: flex-start;
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 14px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.vat-notice {
|
||||||
|
flex: 1;
|
||||||
|
padding-top: 8px;
|
||||||
|
padding-right: 16px;
|
||||||
|
}
|
||||||
|
.vat-notice p {
|
||||||
|
font-size: 8pt;
|
||||||
|
font-weight: 700;
|
||||||
|
color: #3a6aad;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.3px;
|
||||||
|
border-left: 3px solid #5886c4;
|
||||||
|
padding-left: 7px;
|
||||||
|
padding-top: 2px;
|
||||||
|
padding-bottom: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── TOTALS ── */
|
||||||
|
.totals-table {
|
||||||
|
width: 280px;
|
||||||
|
border-collapse: collapse;
|
||||||
|
font-size: 8.5pt;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.totals-table td {
|
||||||
|
padding: 4px 10px;
|
||||||
|
border-bottom: 1px solid #dce9f7;
|
||||||
|
}
|
||||||
|
.totals-table .label { color: #555; text-align: right; }
|
||||||
|
.totals-table .value { text-align: right; font-weight: 500; min-width: 90px; }
|
||||||
|
.totals-table .discount-row { color: #c0392b; }
|
||||||
|
.totals-table .new-subtotal-row td { font-size: 10pt; font-weight: 700; color: #1a1a2e; }
|
||||||
|
.totals-table .vat-row td { color: #7a9cc8; font-style: italic; }
|
||||||
|
.totals-table .final-row td {
|
||||||
|
font-size: 11pt;
|
||||||
|
font-weight: 700;
|
||||||
|
color: #3a6aad;
|
||||||
|
border-top: 2px solid #5886c4;
|
||||||
|
border-bottom: none;
|
||||||
|
padding-top: 6px;
|
||||||
|
padding-bottom: 6px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── COMMENTS ── */
|
||||||
|
.comments-section {
|
||||||
|
margin-bottom: 14px;
|
||||||
|
}
|
||||||
|
.comments-section .section-title {
|
||||||
|
font-size: 8pt;
|
||||||
|
font-weight: 700;
|
||||||
|
text-transform: uppercase;
|
||||||
|
color: #5886c4;
|
||||||
|
letter-spacing: 0.5px;
|
||||||
|
margin-bottom: 5px;
|
||||||
|
}
|
||||||
|
.comments-section ul {
|
||||||
|
padding-left: 14px;
|
||||||
|
}
|
||||||
|
.comments-section li {
|
||||||
|
font-size: 8.5pt;
|
||||||
|
color: #333;
|
||||||
|
margin-bottom: 3px;
|
||||||
|
line-height: 1.4;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── FOOTER (validity line only) ── */
|
||||||
|
.footer {
|
||||||
|
border-top: 1px solid #c2d4ec;
|
||||||
|
padding-top: 7px;
|
||||||
|
margin-top: 10px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
.footer .validity {
|
||||||
|
font-size: 7.5pt;
|
||||||
|
font-style: italic;
|
||||||
|
color: #7a9cc8;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── FIXED BOTTOM FOOTER (repeats on every page) ── */
|
||||||
|
.fixed-footer {
|
||||||
|
position: fixed;
|
||||||
|
bottom: 0;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
padding: 8px 0 0 0;
|
||||||
|
border-top: 1.5px solid #5886c4;
|
||||||
|
display: flex;
|
||||||
|
align-items: stretch;
|
||||||
|
gap: 20px;
|
||||||
|
background: #fff;
|
||||||
|
}
|
||||||
|
.footer-block {
|
||||||
|
width: 30%;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.footer-block-title {
|
||||||
|
font-size: 7pt;
|
||||||
|
font-weight: 700;
|
||||||
|
text-transform: uppercase;
|
||||||
|
color: #5886c4;
|
||||||
|
letter-spacing: 0.4px;
|
||||||
|
margin-bottom: 4px;
|
||||||
|
border-bottom: 1px solid #dce9f7;
|
||||||
|
padding-bottom: 2px;
|
||||||
|
}
|
||||||
|
.footer-block dl {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: max-content 1fr;
|
||||||
|
gap: 2px 6px;
|
||||||
|
padding-left: 0;
|
||||||
|
margin-left: 0;
|
||||||
|
}
|
||||||
|
.footer-block dt {
|
||||||
|
font-size: 7pt;
|
||||||
|
color: #7a9cc8;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
.footer-block dd {
|
||||||
|
font-size: 7pt;
|
||||||
|
color: #1a1a2e;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
.footer-ref {
|
||||||
|
margin-left: auto;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: flex-end;
|
||||||
|
align-items: flex-end;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.footer-ref .ref-quot {
|
||||||
|
font-size: 7.5pt;
|
||||||
|
font-weight: 700;
|
||||||
|
color: #5886c4;
|
||||||
|
line-height: 1.4;
|
||||||
|
}
|
||||||
|
.footer-ref .ref-page {
|
||||||
|
font-size: 7pt;
|
||||||
|
color: #7a9cc8;
|
||||||
|
line-height: 1.4;
|
||||||
|
}
|
||||||
|
.footer-ref .ref-page::after {
|
||||||
|
content: counter(page) " / " counter(pages);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── UTILS ── */
|
||||||
|
.text-muted { color: #aaa; }
|
||||||
|
.dash { color: #ccc; }
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
|
||||||
|
{# ── Bilingual labels ── #}
|
||||||
|
{% if lang == 'gr' %}
|
||||||
|
{% set L_QUOTATION = "ΠΡΟΣΦΟΡΑ" %}
|
||||||
|
{% set L_NUMBER = "Αριθμός" %}
|
||||||
|
{% set L_DATE = "Ημερομηνία" %}
|
||||||
|
{% set L_CLIENT = "ΣΤΟΙΧΕΙΑ ΠΕΛΑΤΗ" %}
|
||||||
|
{% set L_ORDER_META = "ΣΤΟΙΧΕΙΑ ΠΑΡΑΓΓΕΛΙΑΣ" %}
|
||||||
|
{% set L_ORDER_TYPE = "Τύπος" %}
|
||||||
|
{% set L_SHIP_METHOD = "Τρ. Αποστολής" %}
|
||||||
|
{% set L_SHIP_DATE = "Εκτιμώμενη Παράδοση" %}
|
||||||
|
{% set L_DESC = "Περιγραφή" %}
|
||||||
|
{% set L_UNIT_COST = "Τιμή Μον." %}
|
||||||
|
{% set L_DISC = "Έκπτ." %}
|
||||||
|
{% set L_QTY = "Ποσ." %}
|
||||||
|
{% set L_UNIT = "Μον." %}
|
||||||
|
{% set L_VAT_COL = "Φ.Π.Α." %}
|
||||||
|
{% set L_TOTAL = "Σύνολο" %}
|
||||||
|
{% set L_SUBTOTAL = "Υποσύνολο" %}
|
||||||
|
{% set L_GLOBAL_DISC = quotation.global_discount_label or "Έκπτωση" %}
|
||||||
|
{% set L_NEW_SUBTOTAL = "Νέο Υποσύνολο" %}
|
||||||
|
{% set L_VAT = "ΣΥΝΟΛΟ Φ.Π.Α." %}
|
||||||
|
{% set L_SHIPPING_COST = "Μεταφορικά / Shipping" %}
|
||||||
|
{% set L_INSTALL_COST = "Εγκατάσταση / Installation" %}
|
||||||
|
{% set L_EXTRAS = quotation.extras_label or "Άλλα" %}
|
||||||
|
{% set L_FINAL = "ΣΥΝΟΛΟ ΠΛΗΡΩΤΕΟ" %}
|
||||||
|
{% set L_COMMENTS = "ΣΗΜΕΙΩΣΕΙΣ" %}
|
||||||
|
{% set L_VALIDITY = "Η προσφορά ισχύει για 30 ημέρες από την ημερομηνία έκδοσής της." %}
|
||||||
|
{% set L_ORG = "Φορέας" %}
|
||||||
|
{% set L_CONTACT = "Επικοινωνία" %}
|
||||||
|
{% set L_ADDRESS = "Διεύθυνση" %}
|
||||||
|
{% set L_PHONE = "Τηλέφωνο" %}
|
||||||
|
{% set L_COMPANY_ADDR = "Ε.Ο. Αντιρρίου Ιωαννίνων 23, Αγρίνιο, 30131" %}
|
||||||
|
{% set L_CONTACT_INFO = "ΣΤΟΙΧΕΙΑ ΕΠΙΚΟΙΝΩΝΙΑΣ" %}
|
||||||
|
{% set L_PAYMENT_INFO = "ΣΤΟΙΧΕΙΑ ΠΛΗΡΩΜΗΣ" %}
|
||||||
|
{% else %}
|
||||||
|
{% set L_QUOTATION = "QUOTATION" %}
|
||||||
|
{% set L_NUMBER = "Number" %}
|
||||||
|
{% set L_DATE = "Date" %}
|
||||||
|
{% set L_CLIENT = "CLIENT DETAILS" %}
|
||||||
|
{% set L_ORDER_META = "ORDER DETAILS" %}
|
||||||
|
{% set L_ORDER_TYPE = "Order Type" %}
|
||||||
|
{% set L_SHIP_METHOD = "Ship. Method" %}
|
||||||
|
{% set L_SHIP_DATE = "Est. Delivery" %}
|
||||||
|
{% set L_DESC = "Description" %}
|
||||||
|
{% set L_UNIT_COST = "Unit Cost" %}
|
||||||
|
{% set L_DISC = "Disc." %}
|
||||||
|
{% set L_QTY = "Qty" %}
|
||||||
|
{% set L_UNIT = "Unit" %}
|
||||||
|
{% set L_VAT_COL = "VAT" %}
|
||||||
|
{% set L_TOTAL = "Total" %}
|
||||||
|
{% set L_SUBTOTAL = "Subtotal" %}
|
||||||
|
{% set L_GLOBAL_DISC = quotation.global_discount_label or "Discount" %}
|
||||||
|
{% set L_NEW_SUBTOTAL = "New Subtotal" %}
|
||||||
|
{% set L_VAT = "Total VAT" %}
|
||||||
|
{% set L_SHIPPING_COST = "Shipping / Transport" %}
|
||||||
|
{% set L_INSTALL_COST = "Installation" %}
|
||||||
|
{% set L_EXTRAS = quotation.extras_label or "Extras" %}
|
||||||
|
{% set L_FINAL = "TOTAL DUE" %}
|
||||||
|
{% set L_COMMENTS = "NOTES" %}
|
||||||
|
{% set L_VALIDITY = "This quotation is valid for 30 days from the date of issue." %}
|
||||||
|
{% set L_ORG = "Organization" %}
|
||||||
|
{% set L_CONTACT = "Contact" %}
|
||||||
|
{% set L_ADDRESS = "Location" %}
|
||||||
|
{% set L_PHONE = "Phone" %}
|
||||||
|
{% set L_COMPANY_ADDR = "E.O. Antirriou Ioanninon 23, Agrinio, 30131, Greece" %}
|
||||||
|
{% set L_CONTACT_INFO = "CONTACT INFORMATION" %}
|
||||||
|
{% set L_PAYMENT_INFO = "PAYMENT DETAILS" %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{# ── Derived values ── #}
|
||||||
|
{% set today = quotation.created_at[:10] %}
|
||||||
|
|
||||||
|
{# ── Find phone/email contacts + check if primary contact is already phone/email ── #}
|
||||||
|
{% set ns = namespace(customer_phone='', customer_email='', primary_is_phone=false, primary_is_email=false) %}
|
||||||
|
{% for contact in customer.contacts %}
|
||||||
|
{% if contact.type == 'phone' and contact.value %}{% if contact.primary %}{% set ns.customer_phone = contact.value %}{% set ns.primary_is_phone = true %}{% elif not ns.customer_phone %}{% set ns.customer_phone = contact.value %}{% endif %}{% endif %}
|
||||||
|
{% if contact.type == 'email' and contact.value %}{% if contact.primary %}{% set ns.customer_email = contact.value %}{% set ns.primary_is_email = true %}{% elif not ns.customer_email %}{% set ns.customer_email = contact.value %}{% endif %}{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% set customer_phone = ns.customer_phone %}
|
||||||
|
{% set customer_email = ns.customer_email %}
|
||||||
|
{% set primary_is_phone = ns.primary_is_phone %}
|
||||||
|
{% set primary_is_email = ns.primary_is_email %}
|
||||||
|
|
||||||
|
<!-- HEADER -->
|
||||||
|
<div class="header">
|
||||||
|
<div class="company-block">
|
||||||
|
<img class="logo" src="./logo.png" alt="BellSystems"/>
|
||||||
|
<p>{{ L_COMPANY_ADDR }}</p>
|
||||||
|
</div>
|
||||||
|
<div class="quotation-meta-block">
|
||||||
|
<div class="doc-type">{{ L_QUOTATION }}</div>
|
||||||
|
<div class="meta-line"><span class="meta-label">{{ L_NUMBER }}: </span><span class="meta-value">{{ quotation.quotation_number }}</span></div>
|
||||||
|
<div class="meta-line"><span class="meta-label">{{ L_DATE }}: </span><span class="meta-value">{{ today }}</span></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- TITLE / SUBTITLE -->
|
||||||
|
{% if quotation.title %}
|
||||||
|
<div class="quotation-title">
|
||||||
|
<h2>{{ quotation.title }}</h2>
|
||||||
|
{% if quotation.subtitle %}<p>{{ quotation.subtitle }}</p>{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- CLIENT + ORDER META -->
|
||||||
|
<div class="info-row">
|
||||||
|
|
||||||
|
<div class="client-block">
|
||||||
|
<div class="block-title">{{ L_CLIENT }}</div>
|
||||||
|
<table class="fields"><tbody>{% if customer.organization %}<tr><td class="lbl">{{ L_ORG }}</td><td class="val">{{ customer.organization }}</td></tr>{% endif %}{% set name_parts = [customer.title, customer.name, customer.surname] | select | list %}{% if name_parts %}<tr><td class="lbl">{{ L_CONTACT }}</td><td class="val">{{ name_parts | join(' ') }}</td></tr>{% endif %}{% if quotation.client_location %}<tr><td class="lbl">{{ L_ADDRESS }}</td><td class="val">{{ quotation.client_location }}</td></tr>{% elif customer.location %}{% set loc_parts = [customer.location.address, customer.location.city, customer.location.postal_code, customer.location.region, customer.location.country] | select | list %}{% if loc_parts %}<tr><td class="lbl">{{ L_ADDRESS }}</td><td class="val">{{ loc_parts | join(', ') }}</td></tr>{% endif %}{% endif %}{% if customer_email %}<tr><td class="lbl">Email</td><td class="val">{{ customer_email }}</td></tr>{% endif %}{% if customer_phone %}<tr><td class="lbl">{{ L_PHONE }}</td><td class="val">{{ customer_phone }}</td></tr>{% endif %}</tbody></table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="order-block">
|
||||||
|
<div class="block-title">{{ L_ORDER_META }}</div>
|
||||||
|
<table class="fields"><tbody>{% if quotation.order_type %}<tr><td class="lbl">{{ L_ORDER_TYPE }}</td><td class="val">{{ quotation.order_type }}</td></tr>{% endif %}{% if quotation.shipping_method %}<tr><td class="lbl">{{ L_SHIP_METHOD }}</td><td class="val">{{ quotation.shipping_method }}</td></tr>{% endif %}{% if quotation.estimated_shipping_date %}<tr><td class="lbl">{{ L_SHIP_DATE }}</td><td class="val">{{ quotation.estimated_shipping_date }}</td></tr>{% else %}<tr><td class="lbl">{{ L_SHIP_DATE }}</td><td class="val text-muted">—</td></tr>{% endif %}</tbody></table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- ITEMS TABLE -->
|
||||||
|
<table class="items-table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th style="width:38%">{{ L_DESC }}</th>
|
||||||
|
<th class="right" style="width:11%">{{ L_UNIT_COST }}</th>
|
||||||
|
<th class="center" style="width:7%">{{ L_DISC }}</th>
|
||||||
|
<th class="center" style="width:7%">{{ L_QTY }}</th>
|
||||||
|
<th class="center" style="width:7%">{{ L_UNIT }}</th>
|
||||||
|
<th class="center" style="width:6%">{{ L_VAT_COL }}</th>
|
||||||
|
<th class="right" style="width:12%">{{ L_TOTAL }}</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{% for item in quotation.items %}
|
||||||
|
<tr>
|
||||||
|
<td>{% if lang == 'gr' %}{{ item.description_gr or item.description or '' }}{% else %}{{ item.description_en or item.description or '' }}{% endif %}</td>
|
||||||
|
<td class="right">{{ item.unit_cost | format_money }}</td>
|
||||||
|
<td class="center">
|
||||||
|
{% if item.discount_percent and item.discount_percent > 0 %}
|
||||||
|
{{ item.discount_percent | int }}%
|
||||||
|
{% else %}
|
||||||
|
<span class="dash">—</span>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
<td class="center">{{ item.quantity | int if item.quantity == (item.quantity | int) else item.quantity }}</td>
|
||||||
|
<td class="center muted">{{ item.unit_type }}</td>
|
||||||
|
<td class="center">
|
||||||
|
{% if item.vat_percent and item.vat_percent > 0 %}
|
||||||
|
{{ item.vat_percent | int }}%
|
||||||
|
{% else %}
|
||||||
|
<span class="dash">—</span>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
<td class="right">{{ item.line_total | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
{% if quotation.items | length == 0 %}
|
||||||
|
<tr>
|
||||||
|
<td colspan="7" class="text-muted" style="text-align:center; padding: 12px;">—</td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{# ── Shipping / Install as special rows ── #}
|
||||||
|
{% set has_special = (quotation.shipping_cost and quotation.shipping_cost > 0) or (quotation.install_cost and quotation.install_cost > 0) %}
|
||||||
|
{% if has_special %}
|
||||||
|
<tr class="special-spacer"><td colspan="7"></td></tr>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if quotation.shipping_cost and quotation.shipping_cost > 0 %}
|
||||||
|
{% set ship_net = quotation.shipping_cost * (1 - quotation.shipping_cost_discount / 100) %}
|
||||||
|
<tr class="special-row">
|
||||||
|
<td>{{ L_SHIPPING_COST }}{% if quotation.shipping_cost_discount and quotation.shipping_cost_discount > 0 %} <span style="font-size:7.5pt; color:#7a9cc8;">(-{{ quotation.shipping_cost_discount | int }}%)</span>{% endif %}</td>
|
||||||
|
<td class="right">{{ quotation.shipping_cost | format_money }}</td>
|
||||||
|
<td class="center"><span class="dash">—</span></td>
|
||||||
|
<td class="center">1</td>
|
||||||
|
<td class="center muted">—</td>
|
||||||
|
<td class="center"><span class="dash">—</span></td>
|
||||||
|
<td class="right">{{ ship_net | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if quotation.install_cost and quotation.install_cost > 0 %}
|
||||||
|
{% set install_net = quotation.install_cost * (1 - quotation.install_cost_discount / 100) %}
|
||||||
|
<tr class="special-row">
|
||||||
|
<td>{{ L_INSTALL_COST }}{% if quotation.install_cost_discount and quotation.install_cost_discount > 0 %} <span style="font-size:7.5pt; color:#7a9cc8;">(-{{ quotation.install_cost_discount | int }}%)</span>{% endif %}</td>
|
||||||
|
<td class="right">{{ quotation.install_cost | format_money }}</td>
|
||||||
|
<td class="center"><span class="dash">—</span></td>
|
||||||
|
<td class="center">1</td>
|
||||||
|
<td class="center muted">—</td>
|
||||||
|
<td class="center"><span class="dash">—</span></td>
|
||||||
|
<td class="right">{{ install_net | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<!-- TOTALS + VAT NOTICE -->
|
||||||
|
<div class="below-table">
|
||||||
|
|
||||||
|
<div class="vat-notice">
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<table class="totals-table">
|
||||||
|
<tr>
|
||||||
|
<td class="label">{{ L_SUBTOTAL }}</td>
|
||||||
|
<td class="value">{{ quotation.subtotal_before_discount | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
{% if quotation.global_discount_percent and quotation.global_discount_percent > 0 %}
|
||||||
|
<tr class="discount-row">
|
||||||
|
<td class="label">{{ L_GLOBAL_DISC }} ({{ quotation.global_discount_percent | int }}%)</td>
|
||||||
|
<td class="value">- {{ quotation.global_discount_amount | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
<tr class="new-subtotal-row">
|
||||||
|
<td class="label">{{ L_NEW_SUBTOTAL }}</td>
|
||||||
|
<td class="value">{{ quotation.new_subtotal | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
|
<tr class="vat-row">
|
||||||
|
<td class="label">{{ L_VAT }}</td>
|
||||||
|
<td class="value">{{ quotation.vat_amount | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
{% if quotation.extras_cost and quotation.extras_cost > 0 %}
|
||||||
|
<tr>
|
||||||
|
<td class="label">{{ L_EXTRAS }}</td>
|
||||||
|
<td class="value">{{ quotation.extras_cost | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endif %}
|
||||||
|
<tr class="final-row">
|
||||||
|
<td class="label">{{ L_FINAL }}</td>
|
||||||
|
<td class="value">{{ quotation.final_total | format_money }}</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- SPACER: flexible gap between totals and notes -->
|
||||||
|
<div class="main-content-gap"></div>
|
||||||
|
|
||||||
|
<!-- COMMENTS / NOTES -->
|
||||||
|
{% set qn = quotation.quick_notes or {} %}
|
||||||
|
{% set has_quick = (qn.payment_advance and qn.payment_advance.enabled) or (qn.lead_time and qn.lead_time.enabled) or (qn.backup_relays and qn.backup_relays.enabled) %}
|
||||||
|
{% set has_comments = quotation.comments and quotation.comments | length > 0 %}
|
||||||
|
|
||||||
|
{% if has_quick or has_comments %}
|
||||||
|
<div class="comments-section">
|
||||||
|
<div class="section-title">{{ L_COMMENTS }}</div>
|
||||||
|
<ul>
|
||||||
|
|
||||||
|
{# ── Quick Notes ── #}
|
||||||
|
|
||||||
|
{# Payment Advance #}
|
||||||
|
{% if qn.payment_advance and qn.payment_advance.enabled %}
|
||||||
|
{% set pct = qn.payment_advance.percent | string %}
|
||||||
|
{% if lang == 'gr' %}
|
||||||
|
<li>Απαιτείται προκαταβολή <strong>{{ pct }}%</strong> με την επιβεβαίωση της παραγγελίας.</li>
|
||||||
|
{% else %}
|
||||||
|
<li><strong>{{ pct }}%</strong> advance payment is required upon order confirmation.</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{# Lead Time #}
|
||||||
|
{% if qn.lead_time and qn.lead_time.enabled %}
|
||||||
|
{% set days = qn.lead_time.days | string %}
|
||||||
|
{% if lang == 'gr' %}
|
||||||
|
<li>Εκτιμώμενος χρόνος παράδοσης, <strong>{{ days }} εργάσιμες ημέρες</strong> από την επιβεβαίωση της παραγγελίας και παραλαβή της προκαταβολής.</li>
|
||||||
|
{% else %}
|
||||||
|
<li>Estimated delivery time is <strong>{{ days }} working days</strong> from order confirmation and receipt of advance payment.</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{# Backup Relays #}
|
||||||
|
{% if qn.backup_relays and qn.backup_relays.enabled %}
|
||||||
|
{% set n = qn.backup_relays.count | int %}
|
||||||
|
{% if lang == 'gr' %}
|
||||||
|
{% if n == 1 %}
|
||||||
|
<li>Συμπεριλαμβάνονται: <strong>{{ n }} έξτρα Εφεδρικό Ρελέ Ισχύος</strong></li>
|
||||||
|
{% else %}
|
||||||
|
<li>Συμπεριλαμβάνονται: <strong>{{ n }} έξτρα Εφεδρικά Ρελέ Ισχύος</strong></li>
|
||||||
|
{% endif %}
|
||||||
|
{% else %}
|
||||||
|
{% if n == 1 %}
|
||||||
|
<li><strong>{{ n }} Extra Relay</strong> included as Backup, free of charge.</li>
|
||||||
|
{% else %}
|
||||||
|
<li><strong>{{ n }} Extra Relays</strong> included as Backups, free of charge.</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{# ── Dynamic comments ── #}
|
||||||
|
{% if has_comments %}
|
||||||
|
{% for comment in quotation.comments %}
|
||||||
|
{% if comment and comment.strip() %}
|
||||||
|
<li>{{ comment }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- VALIDITY -->
|
||||||
|
<div class="footer">
|
||||||
|
<span class="validity">{{ L_VALIDITY }}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- FIXED BOTTOM FOOTER: contact + payment (repeats every page) -->
|
||||||
|
<div class="fixed-footer">
|
||||||
|
|
||||||
|
<div class="footer-block">
|
||||||
|
<div class="footer-block-title">{{ L_CONTACT_INFO }}</div>
|
||||||
|
<dl>
|
||||||
|
<dt>{% if lang == 'gr' %}Εταιρεία{% else %}Company{% endif %}</dt>
|
||||||
|
<dd>BellSystems</dd>
|
||||||
|
<dt>{% if lang == 'gr' %}Τηλ.{% else %}Phone{% endif %}</dt>
|
||||||
|
<dd>+(30) 26410 33344</dd>
|
||||||
|
<dt>{% if lang == 'gr' %}Email{% else %}Email{% endif %}</dt>
|
||||||
|
<dd>sales@bellsystems.gr</dd>
|
||||||
|
<dt>Web</dt>
|
||||||
|
<dd>www.bellsystems.gr</dd>
|
||||||
|
<dt>Links</dt>
|
||||||
|
<dd><img src="./linktree.png" alt="linktr.ee/bellsystems" style="height: 7pt; vertical-align: middle;"/></dd>
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="footer-block">
|
||||||
|
<div class="footer-block-title">{{ L_PAYMENT_INFO }}</div>
|
||||||
|
<dl>
|
||||||
|
<dt>{% if lang == 'gr' %}Τράπεζα{% else %}Bank{% endif %}</dt>
|
||||||
|
<dd>Piraeus Bank</dd>
|
||||||
|
<dt>{% if lang == 'gr' %}Δικαιούχος{% else %}Holder{% endif %}</dt>
|
||||||
|
<dd>Pontikas Georgios</dd>
|
||||||
|
<dt>{% if lang == 'gr' %}Αριθμός{% else %}Account No.{% endif %}</dt>
|
||||||
|
<dd>6264-1484-35226</dd>
|
||||||
|
<dt>IBAN</dt>
|
||||||
|
<dd>GR8101712640006264148435226</dd>
|
||||||
|
<dt>BIC/SWIFT</dt>
|
||||||
|
<dd>PIRBGRAA</dd>
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="footer-ref">
|
||||||
|
<span class="ref-quot">{{ quotation.quotation_number }}</span>
|
||||||
|
<span class="ref-page">{% if lang == 'gr' %}Σελίδα {% else %}Page {% endif %}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
0
backend/utils/__init__.py
Normal file
153
backend/utils/email.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
import logging
|
||||||
|
import resend
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def send_email(to: str, subject: str, html: str) -> None:
    """Send a transactional email via Resend.

    Args:
        to: Recipient email address.
        subject: Subject line of the message.
        html: Full HTML body of the message.

    Raises:
        Exception: any error raised by the Resend SDK is logged and then
            re-raised to the caller.  (The previous docstring claimed errors
            were not raised, which contradicted the ``raise`` below.)
    """
    try:
        # The Resend SDK reads its credentials from this module-level attribute.
        resend.api_key = settings.resend_api_key
        resend.Emails.send({
            "from": settings.email_from,
            "to": to,
            "subject": subject,
            "html": html,
        })
        logger.info("Email sent to %s — subject: %s", to, subject)
    except Exception as exc:
        # Log with recipient context, then propagate so callers can react.
        logger.error("Failed to send email to %s: %s", to, exc)
        raise
|
||||||
|
|
||||||
|
|
||||||
|
def send_device_assignment_invite(
    customer_email: str,
    serial_number: str,
    device_name: str,
    customer_name: str | None = None,
) -> None:
    """Notify a customer that a Bell Systems device has been assigned and shipped to them.

    Args:
        customer_email: Recipient address for the notification.
        serial_number: Device serial shown in the info card of the email.
        device_name: Model name, rendered as "Bell Systems {device_name}".
        customer_name: Optional name for a personalised greeting; a generic
            "Dear Customer," is used when omitted.
    """
    # Personalised greeting with a generic fallback.
    greeting = f"Dear {customer_name}," if customer_name else "Dear Customer,"
    # Light-themed, table-based HTML (email clients have poor CSS support,
    # hence the inline styles throughout).
    html = f"""
    <!DOCTYPE html>
    <html lang="en">
    <head><meta charset="UTF-8"><meta name="viewport" content="width=device-width, initial-scale=1.0"></head>
    <body style="margin:0; padding:0; background-color:#f4f4f7; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif;">
      <table width="100%" cellpadding="0" cellspacing="0" style="background-color:#f4f4f7; padding: 40px 0;">
        <tr>
          <td align="center">
            <table width="600" cellpadding="0" cellspacing="0" style="background-color:#ffffff; border-radius:8px; overflow:hidden; box-shadow: 0 2px 8px rgba(0,0,0,0.08); max-width:600px; width:100%;">

              <!-- Header -->
              <tr>
                <td style="background-color:#0f172a; padding: 32px 40px; text-align:center;">
                  <h1 style="color:#ffffff; margin:0; font-size:22px; font-weight:700; letter-spacing:1px;">BELLSYSTEMS</h1>
                  <p style="color:#94a3b8; margin:6px 0 0; font-size:13px; letter-spacing:2px; text-transform:uppercase;">Device Shipment Confirmation</p>
                </td>
              </tr>

              <!-- Body -->
              <tr>
                <td style="padding: 40px 40px 32px;">
                  <p style="margin:0 0 20px; font-size:16px; color:#1e293b;">{greeting}</p>

                  <p style="margin:0 0 16px; font-size:15px; color:#334155; line-height:1.7;">
                    Your <strong>Bell Systems {device_name}</strong> device has been successfully manufactured and shipped.
                    We are delighted to have it on its way to you!
                  </p>

                  <p style="margin:0 0 24px; font-size:15px; color:#334155; line-height:1.7;">
                    To get started, download our controller application from the Google Play Store and follow the in-app setup instructions.
                  </p>

                  <!-- CTA Button -->
                  <table cellpadding="0" cellspacing="0" width="100%" style="margin: 28px 0;">
                    <tr>
                      <td align="center">
                        <a href="https://play.google.com/store/apps/details?id=com.bellsystems.vesper"
                           style="display:inline-block; background-color:#0f172a; color:#ffffff; text-decoration:none;
                                  padding:14px 32px; border-radius:6px; font-size:15px; font-weight:600; letter-spacing:0.5px;">
                          Download on Google Play
                        </a>
                      </td>
                    </tr>
                  </table>

                  <!-- Device info card -->
                  <table width="100%" cellpadding="0" cellspacing="0" style="background:#f8fafc; border:1px solid #e2e8f0; border-radius:6px; margin-bottom:28px;">
                    <tr>
                      <td style="padding:16px 20px; border-bottom:1px solid #e2e8f0;">
                        <span style="font-size:12px; color:#64748b; text-transform:uppercase; letter-spacing:1px; font-weight:600;">Device</span><br>
                        <span style="font-size:15px; color:#0f172a; font-weight:600;">Bell Systems {device_name}</span>
                      </td>
                    </tr>
                    <tr>
                      <td style="padding:16px 20px;">
                        <span style="font-size:12px; color:#64748b; text-transform:uppercase; letter-spacing:1px; font-weight:600;">Serial Number</span><br>
                        <code style="font-size:14px; color:#0f172a; background:#e2e8f0; padding:3px 8px; border-radius:4px; font-family:monospace;">{serial_number}</code>
                      </td>
                    </tr>
                  </table>

                  <p style="margin:0; font-size:15px; color:#334155; line-height:1.7;">
                    Thank you very much. We greatly appreciate your choice in our products.
                  </p>
                </td>
              </tr>

              <!-- Footer -->
              <tr>
                <td style="background-color:#f8fafc; border-top:1px solid #e2e8f0; padding:24px 40px; text-align:center;">
                  <p style="margin:0 0 4px; font-size:14px; color:#0f172a; font-weight:700;">BellSystems.gr</p>
                  <p style="margin:0; font-size:12px; color:#94a3b8;">
                    If you did not expect this email, please contact us at
                    <a href="mailto:support@bellsystems.gr" style="color:#64748b;">support@bellsystems.gr</a>
                  </p>
                </td>
              </tr>

            </table>
          </td>
        </tr>
      </table>
    </body>
    </html>
    """
    # Delivery (and error handling) is delegated to the module's send_email.
    send_email(
        to=customer_email,
        subject=f"Your Bell Systems {device_name} is on its way! 🎉",
        html=html,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def send_device_provisioned_alert(
    admin_email: str,
    serial_number: str,
    hw_type: str,
) -> None:
    """Internal alert sent to an admin when a device reaches provisioned status.

    Args:
        admin_email: Address of the admin/operator to notify.
        serial_number: Serial of the newly provisioned device.
        hw_type: Board type identifier; rendered upper-cased in the table.
    """
    # NOTE(review): the "View in Admin Console" link below is a placeholder
    # (href="#") — replace with the real admin-console URL before relying on it.
    html = f"""
    <div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
      <h2 style="color: #111827;">Device Provisioned</h2>
      <p>A Vesper device has successfully provisioned and is ready to ship.</p>
      <table style="border-collapse: collapse; width: 100%; margin-top: 16px;">
        <tr>
          <td style="padding: 6px 12px; font-weight: bold; background: #f9fafb;">Serial Number</td>
          <td style="padding: 6px 12px; font-family: monospace;">{serial_number}</td>
        </tr>
        <tr>
          <td style="padding: 6px 12px; font-weight: bold; background: #f9fafb;">Board Type</td>
          <td style="padding: 6px 12px;">{hw_type.upper()}</td>
        </tr>
      </table>
      <p style="margin-top: 24px;">
        <a href="#" style="color: #2563eb;">View in Admin Console</a>
      </p>
    </div>
    """
    # Delivery (and error handling) is delegated to the module's send_email.
    send_email(
        to=admin_email,
        subject=f"[Vesper] Device provisioned — {serial_number}",
        html=html,
    )
|
||||||
BIN
backend/utils/emails/assets/bell_systems_horizontal_darkMode.png
Normal file
|
After Width: | Height: | Size: 16 KiB |
220
backend/utils/emails/device_assigned_mail.py
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
import logging
|
||||||
|
import base64
|
||||||
|
import os
|
||||||
|
import resend
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Embed the logo as a base64 data URI so it renders in email clients without
# needing a publicly hosted image URL.
_LOGO_PATH = os.path.join(os.path.dirname(__file__), "assets", "bell_systems_horizontal_darkMode.png")
try:
    with open(_LOGO_PATH, "rb") as _f:
        _LOGO_B64 = base64.b64encode(_f.read()).decode()
    _LOGO_SRC = f"data:image/png;base64,{_LOGO_B64}"
except Exception:
    # Deliberate best-effort: if the asset is missing/unreadable, templates
    # fall back to a text heading and the email still sends.
    _LOGO_SRC = ""
|
||||||
|
|
||||||
|
|
||||||
|
def send_email(to: str, subject: str, html: str) -> None:
    """Deliver one transactional email through the Resend API.

    Failures are logged with the recipient address and then propagated.
    """
    try:
        resend.api_key = settings.resend_api_key
        payload = {
            "from": settings.email_from,
            "to": to,
            "subject": subject,
            "html": html,
        }
        resend.Emails.send(payload)
        logger.info("Email sent to %s — subject: %s", to, subject)
    except Exception as exc:
        logger.error("Failed to send email to %s: %s", to, exc)
        raise
|
||||||
|
|
||||||
|
|
||||||
|
# Google Play closed-testing opt-in page for the Vesper programme.
_OPT_IN_URL = "https://play.google.com/apps/testing/com.bellsystems.vesper"
# Public Play Store listing for the Vesper companion app.
_APP_URL = "https://play.google.com/store/apps/details?id=com.bellsystems.vesper"
|
||||||
|
|
||||||
|
|
||||||
|
def send_device_assigned_email(
    user_email: str,
    serial_number: str,
    device_name: str,
    user_name: str | None = None,
) -> None:
    """
    Notify a user that a BellSystems device has been assigned to their account,
    with links to opt in to the Vesper beta programme and download the app.

    Args:
        user_email: Recipient address for the notification.
        serial_number: Serial shown in the device info card.
        device_name: Model name, rendered as "BellSystems {device_name}".
        user_name: Optional name for a personalised greeting; a generic
            "Dear valued customer," is used when omitted.
    """
    # Personalised greeting with a generic fallback.
    greeting = f"Dear {user_name}," if user_name else "Dear valued customer,"

    # Dark-themed, table-based HTML (inline styles for email-client support).
    # The header shows the embedded base64 logo (_LOGO_SRC) when it loaded at
    # import time, otherwise a plain text heading — see the inline conditional.
    html = f"""<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Your BellSystems Device Is Ready</title>
</head>
<body style="margin:0; padding:0; background-color:#0d1117; font-family:'Helvetica Neue', Helvetica, Arial, sans-serif;">
  <table width="100%" cellpadding="0" cellspacing="0" style="background-color:#0d1117; padding:40px 16px;">
    <tr>
      <td align="center">
        <table width="580" cellpadding="0" cellspacing="0"
               style="background-color:#161b22; border-radius:12px; overflow:hidden;
                      box-shadow:0 4px 24px rgba(0,0,0,0.5); max-width:580px; width:100%;
                      border:1px solid #30363d;">

          <!-- Header with logo -->
          <tr>
            <td style="background-color:#0f172a; padding:32px 40px 28px; text-align:center;
                       border-bottom:1px solid #21262d;">
              {"<img src='" + _LOGO_SRC + "' alt='BellSystems' width='180' style='display:block; margin:0 auto; max-width:180px;'>" if _LOGO_SRC else "<h1 style='color:#ffffff; margin:0; font-size:22px; font-weight:700; letter-spacing:1px;'>BELLSYSTEMS</h1>"}
              <p style="color:#64748b; margin:14px 0 0; font-size:11px; letter-spacing:2.5px;
                        text-transform:uppercase; font-weight:600;">Device Activation</p>
            </td>
          </tr>

          <!-- Body -->
          <tr>
            <td style="padding:36px 40px 28px;">

              <p style="margin:0 0 24px; font-size:16px; color:#c9d1d9; font-weight:500;">
                {greeting}
              </p>

              <p style="margin:0 0 18px; font-size:15px; color:#8b949e; line-height:1.75;">
                Exciting news — your
                <strong style="color:#c9d1d9;">BellSystems {device_name}</strong>
                has been assigned to your account and is ready to use!
              </p>

              <p style="margin:0 0 28px; font-size:15px; color:#8b949e; line-height:1.75;">
                To get started, join the <strong style="color:#c9d1d9;">Vesper</strong> programme
                and download the companion app from the Google Play Store. The app gives you full
                control over your device, including scheduling, customisation, and real-time
                monitoring.
              </p>

              <!-- CTA buttons -->
              <table cellpadding="0" cellspacing="0" width="100%" style="margin:0 0 32px;">
                <tr>
                  <td align="center" style="padding-bottom:12px;">
                    <a href="{_OPT_IN_URL}"
                       style="display:inline-block; background-color:#238636; color:#ffffff;
                              text-decoration:none; padding:14px 32px; border-radius:8px;
                              font-size:14px; font-weight:700; letter-spacing:0.4px;
                              border:1px solid #2ea043; width:240px; text-align:center;">
                      Join the Vesper Programme
                    </a>
                  </td>
                </tr>
                <tr>
                  <td align="center">
                    <a href="{_APP_URL}"
                       style="display:inline-block; background-color:#1f6feb; color:#ffffff;
                              text-decoration:none; padding:14px 32px; border-radius:8px;
                              font-size:14px; font-weight:700; letter-spacing:0.4px;
                              border:1px solid #388bfd; width:240px; text-align:center;">
                      Download on Google Play
                    </a>
                  </td>
                </tr>
              </table>

              <!-- Device info card -->
              <table width="100%" cellpadding="0" cellspacing="0"
                     style="background:#0d1117; border:1px solid #30363d; border-radius:8px; margin-bottom:28px;">
                <tr>
                  <td style="padding:16px 20px; border-bottom:1px solid #21262d;">
                    <span style="font-size:11px; color:#58a6ff; text-transform:uppercase;
                                 letter-spacing:1.2px; font-weight:700;">Device Model</span><br>
                    <span style="font-size:15px; color:#c9d1d9; font-weight:600; margin-top:4px; display:block;">
                      BellSystems {device_name}
                    </span>
                  </td>
                </tr>
                <tr>
                  <td style="padding:16px 20px;">
                    <span style="font-size:11px; color:#58a6ff; text-transform:uppercase;
                                 letter-spacing:1.2px; font-weight:700;">Serial Number</span><br>
                    <code style="font-size:14px; color:#79c0ff; background:#161b22;
                                 padding:4px 10px; border-radius:4px; font-family:monospace;
                                 border:1px solid #30363d; margin-top:6px; display:inline-block;">
                      {serial_number}
                    </code>
                  </td>
                </tr>
              </table>

              <!-- How it works steps -->
              <table width="100%" cellpadding="0" cellspacing="0"
                     style="background:#0d1117; border:1px solid #30363d; border-radius:8px; margin-bottom:28px;">
                <tr>
                  <td style="padding:16px 20px; border-bottom:1px solid #21262d;">
                    <span style="font-size:11px; color:#8b949e; text-transform:uppercase;
                                 letter-spacing:1.2px; font-weight:700;">Getting Started</span>
                  </td>
                </tr>
                <tr>
                  <td style="padding:14px 20px; border-bottom:1px solid #21262d;">
                    <span style="color:#58a6ff; font-weight:700; font-size:13px;">1&nbsp;&nbsp;</span>
                    <span style="color:#8b949e; font-size:13px; line-height:1.6;">
                      Click <strong style="color:#c9d1d9;">Join the Vesper Programme</strong> above to opt in via the Google Play testing programme.
                    </span>
                  </td>
                </tr>
                <tr>
                  <td style="padding:14px 20px; border-bottom:1px solid #21262d;">
                    <span style="color:#58a6ff; font-weight:700; font-size:13px;">2&nbsp;&nbsp;</span>
                    <span style="color:#8b949e; font-size:13px; line-height:1.6;">
                      Download the <strong style="color:#c9d1d9;">Vesper</strong> app from the Google Play Store.
                    </span>
                  </td>
                </tr>
                <tr>
                  <td style="padding:14px 20px;">
                    <span style="color:#58a6ff; font-weight:700; font-size:13px;">3&nbsp;&nbsp;</span>
                    <span style="color:#8b949e; font-size:13px; line-height:1.6;">
                      Sign in with your account and your device will appear automatically.
                    </span>
                  </td>
                </tr>
              </table>

              <p style="margin:0; font-size:14px; color:#6e7681; line-height:1.7;">
                If you have any questions or need assistance with setup, our support team is
                always happy to help.
              </p>

            </td>
          </tr>

          <!-- Footer -->
          <tr>
            <td style="background-color:#0d1117; border-top:1px solid #21262d;
                       padding:24px 40px; text-align:center;">
              <p style="margin:0 0 6px; font-size:13px; color:#8b949e; font-weight:600;">
                BellSystems.gr
              </p>
              <p style="margin:0; font-size:12px; color:#6e7681;">
                Questions? Contact us at
                <a href="mailto:support@bellsystems.gr"
                   style="color:#58a6ff; text-decoration:none;">support@bellsystems.gr</a>
              </p>
              <p style="margin:8px 0 0; font-size:11px; color:#484f58;">
                If you did not expect this notification, please disregard this message.
              </p>
            </td>
          </tr>

        </table>
      </td>
    </tr>
  </table>
</body>
</html>"""

    # Delivery (and error handling) is delegated to this module's send_email.
    send_email(
        to=user_email,
        subject=f"Your BellSystems {device_name} is ready — get started now!",
        html=html,
    )
|
||||||
155
backend/utils/emails/device_mfged_mail.py
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
import logging
|
||||||
|
import base64
|
||||||
|
import os
|
||||||
|
import resend
|
||||||
|
from config import settings
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Embed logo as base64 so it works in any email client without a public URL
|
||||||
|
_LOGO_PATH = os.path.join(os.path.dirname(__file__), "assets", "bell_systems_horizontal_darkMode.png")
|
||||||
|
try:
|
||||||
|
with open(_LOGO_PATH, "rb") as _f:
|
||||||
|
_LOGO_B64 = base64.b64encode(_f.read()).decode()
|
||||||
|
_LOGO_SRC = f"data:image/png;base64,{_LOGO_B64}"
|
||||||
|
except Exception:
|
||||||
|
_LOGO_SRC = "" # fallback: image won't appear but email still sends
|
||||||
|
|
||||||
|
|
||||||
|
def send_email(to: str, subject: str, html: str) -> None:
    """Send a transactional email via Resend.

    to: recipient address.
    subject: subject line of the message.
    html: full HTML body of the message.

    Logs the outcome either way; failures are re-raised to the caller
    after being logged.
    """
    try:
        resend.api_key = settings.resend_api_key
        message = {
            "from": settings.email_from,
            "to": to,
            "subject": subject,
            "html": html,
        }
        resend.Emails.send(message)
        logger.info("Email sent to %s — subject: %s", to, subject)
    except Exception as exc:
        logger.error("Failed to send email to %s: %s", to, exc)
        raise
|
||||||
|
|
||||||
|
|
||||||
|
def send_device_manufactured_email(
|
||||||
|
customer_email: str,
|
||||||
|
serial_number: str,
|
||||||
|
device_name: str,
|
||||||
|
customer_name: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Notify a customer that their BellSystems device has been manufactured
|
||||||
|
and is being prepared for shipment.
|
||||||
|
"""
|
||||||
|
greeting = f"Dear {customer_name}," if customer_name else "Dear valued customer,"
|
||||||
|
|
||||||
|
html = f"""<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>Your BellSystems Device Has Been Manufactured</title>
|
||||||
|
</head>
|
||||||
|
<body style="margin:0; padding:0; background-color:#0d1117; font-family:'Helvetica Neue', Helvetica, Arial, sans-serif;">
|
||||||
|
<table width="100%" cellpadding="0" cellspacing="0" style="background-color:#0d1117; padding:40px 16px;">
|
||||||
|
<tr>
|
||||||
|
<td align="center">
|
||||||
|
<table width="580" cellpadding="0" cellspacing="0"
|
||||||
|
style="background-color:#161b22; border-radius:12px; overflow:hidden;
|
||||||
|
box-shadow:0 4px 24px rgba(0,0,0,0.5); max-width:580px; width:100%;
|
||||||
|
border:1px solid #30363d;">
|
||||||
|
|
||||||
|
<!-- Header with logo -->
|
||||||
|
<tr>
|
||||||
|
<td style="background-color:#0f172a; padding:32px 40px 28px; text-align:center;
|
||||||
|
border-bottom:1px solid #21262d;">
|
||||||
|
{"<img src='" + _LOGO_SRC + "' alt='BellSystems' width='180' style='display:block; margin:0 auto; max-width:180px;'>" if _LOGO_SRC else "<h1 style='color:#ffffff; margin:0; font-size:22px; font-weight:700; letter-spacing:1px;'>BELLSYSTEMS</h1>"}
|
||||||
|
<p style="color:#64748b; margin:14px 0 0; font-size:11px; letter-spacing:2.5px;
|
||||||
|
text-transform:uppercase; font-weight:600;">Manufacturing Update</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<!-- Body -->
|
||||||
|
<tr>
|
||||||
|
<td style="padding:36px 40px 28px;">
|
||||||
|
|
||||||
|
<p style="margin:0 0 24px; font-size:16px; color:#c9d1d9; font-weight:500;">
|
||||||
|
{greeting}
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p style="margin:0 0 18px; font-size:15px; color:#8b949e; line-height:1.75;">
|
||||||
|
We are pleased to inform you that your
|
||||||
|
<strong style="color:#c9d1d9;">BellSystems {device_name}</strong>
|
||||||
|
has been successfully manufactured and has passed all quality checks.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p style="margin:0 0 28px; font-size:15px; color:#8b949e; line-height:1.75;">
|
||||||
|
Your device is now being prepared for delivery. You will receive a separate
|
||||||
|
notification with tracking information once it has been dispatched.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<!-- Device info card -->
|
||||||
|
<table width="100%" cellpadding="0" cellspacing="0"
|
||||||
|
style="background:#0d1117; border:1px solid #30363d; border-radius:8px; margin-bottom:32px;">
|
||||||
|
<tr>
|
||||||
|
<td style="padding:16px 20px; border-bottom:1px solid #21262d;">
|
||||||
|
<span style="font-size:11px; color:#58a6ff; text-transform:uppercase;
|
||||||
|
letter-spacing:1.2px; font-weight:700;">Device Model</span><br>
|
||||||
|
<span style="font-size:15px; color:#c9d1d9; font-weight:600; margin-top:4px; display:block;">
|
||||||
|
BellSystems {device_name}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td style="padding:16px 20px;">
|
||||||
|
<span style="font-size:11px; color:#58a6ff; text-transform:uppercase;
|
||||||
|
letter-spacing:1.2px; font-weight:700;">Serial Number</span><br>
|
||||||
|
<code style="font-size:14px; color:#79c0ff; background:#161b22;
|
||||||
|
padding:4px 10px; border-radius:4px; font-family:monospace;
|
||||||
|
border:1px solid #30363d; margin-top:6px; display:inline-block;">
|
||||||
|
{serial_number}
|
||||||
|
</code>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<p style="margin:0 0 8px; font-size:14px; color:#6e7681; line-height:1.7;">
|
||||||
|
Thank you for choosing BellSystems. We take great pride in crafting each device
|
||||||
|
with care and precision, and we look forward to delivering an exceptional
|
||||||
|
experience to you.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
<!-- Footer -->
|
||||||
|
<tr>
|
||||||
|
<td style="background-color:#0d1117; border-top:1px solid #21262d;
|
||||||
|
padding:24px 40px; text-align:center;">
|
||||||
|
<p style="margin:0 0 6px; font-size:13px; color:#8b949e; font-weight:600;">
|
||||||
|
BellSystems.gr
|
||||||
|
</p>
|
||||||
|
<p style="margin:0; font-size:12px; color:#6e7681;">
|
||||||
|
Questions? Contact us at
|
||||||
|
<a href="mailto:support@bellsystems.gr"
|
||||||
|
style="color:#58a6ff; text-decoration:none;">support@bellsystems.gr</a>
|
||||||
|
</p>
|
||||||
|
<p style="margin:8px 0 0; font-size:11px; color:#484f58;">
|
||||||
|
If you did not expect this notification, please disregard this message.
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
</table>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</body>
|
||||||
|
</html>"""
|
||||||
|
|
||||||
|
send_email(
|
||||||
|
to=customer_email,
|
||||||
|
subject=f"Your BellSystems {device_name} has been manufactured",
|
||||||
|
html=html,
|
||||||
|
)
|
||||||
215
backend/utils/nvs_generator.py
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
"""
|
||||||
|
Pure-Python ESP32 NVS partition binary generator.
|
||||||
|
|
||||||
|
Generates a binary-compatible NVS partition for a Vesper device identity.
|
||||||
|
No ESP-IDF toolchain required on the server.
|
||||||
|
|
||||||
|
NVS partition layout (ESP32 NVS format v2):
|
||||||
|
- Partition size: 0x5000 (20480 bytes) = 5 pages
|
||||||
|
- Page size: 4096 bytes
|
||||||
|
- Page structure:
|
||||||
|
Offset 0x000 - 0x01F : Page header (32 bytes)
|
||||||
|
Offset 0x020 - 0x03F : Entry state bitmap (32 bytes, 2 bits per slot)
|
||||||
|
Offset 0x040 - 0xFFF : Entry storage (120 slots × 32 bytes each)
|
||||||
|
|
||||||
|
Entry state bitmap: 2 bits per entry
|
||||||
|
11 = empty
|
||||||
|
10 = written (active)
|
||||||
|
00 = erased
|
||||||
|
|
||||||
|
Page header (32 bytes):
|
||||||
|
uint32 page_state (0xFFFFFFFE = active)
|
||||||
|
uint32 sequence_number
|
||||||
|
uint8 version (0xFE = v2)
|
||||||
|
uint8 reserved[19]
|
||||||
|
uint32 crc32 (of bytes 4..27)
|
||||||
|
|
||||||
|
Entry (32 bytes):
|
||||||
|
uint8 ns_index (namespace index, 0 = namespace entry itself)
|
||||||
|
uint8 type (0x01=uint8, 0x02=uint16, 0x04=uint32, 0x08=uint64, 0x21=string, 0x41=blob)
|
||||||
|
uint8 span (number of 32-byte slots this entry occupies)
|
||||||
|
uint8 chunk_index (0xFF for non-blob)
|
||||||
|
uint32 crc32 (of the entry header bytes 0..3 and data, excluding the crc field itself)
|
||||||
|
char key[16] (null-terminated, max 15 chars + null)
|
||||||
|
<data> [8 bytes for primitives, or inline for short strings]
|
||||||
|
|
||||||
|
For strings:
|
||||||
|
- If len <= 8 bytes (incl. null): fits in the data field of the same entry (span=1)
|
||||||
|
- Longer strings: data follows in subsequent 32-byte "data entries" (span = 1 + ceil(strlen+1, 32))
|
||||||
|
- The entry header data field contains: uint16 data_size, uint16 reserved=0xFFFF, uint32 crc32_of_data
|
||||||
|
"""
|
||||||
|
|
||||||
|
import struct
|
||||||
|
import binascii
|
||||||
|
from typing import List, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
NVS_PAGE_SIZE = 4096
|
||||||
|
NVS_PARTITION_SIZE = 0x5000 # 20480 bytes = 5 pages
|
||||||
|
NVS_ENTRY_SIZE = 32
|
||||||
|
NVS_ENTRY_COUNT = 126 # entries per page (first 3 slots are header + bitmap)
|
||||||
|
|
||||||
|
NVS_PAGE_STATE_ACTIVE = 0xFFFFFFFE
|
||||||
|
NVS_PAGE_VERSION = 0xFE
|
||||||
|
|
||||||
|
ENTRY_STATE_WRITTEN = 0b10 # 2 bits
|
||||||
|
ENTRY_STATE_EMPTY = 0b11 # 2 bits (erased flash)
|
||||||
|
|
||||||
|
ENTRY_TYPE_NAMESPACE = 0x01 # used for namespace entries (uint8)
|
||||||
|
ENTRY_TYPE_STRING = 0x21
|
||||||
|
|
||||||
|
|
||||||
|
def _crc32(data: bytes) -> int:
|
||||||
|
# ESP-IDF uses 0xFFFFFFFF as the initial CRC seed (matches esp_rom_crc32_le)
|
||||||
|
return binascii.crc32(data, 0xFFFFFFFF) & 0xFFFFFFFF
|
||||||
|
|
||||||
|
|
||||||
|
def _page_header_crc(seq: int, version: int) -> int:
    """Checksum over page-header bytes 4..27.

    That region holds the uint32 sequence number, the uint8 version byte,
    and 19 reserved bytes left in the erased-flash state (0xFF).
    """
    reserved = b"\xFF" * 19
    covered = struct.pack("<IB", seq, version) + reserved
    return _crc32(covered)
|
||||||
|
|
||||||
|
|
||||||
|
def _entry_crc(ns_index: int, entry_type: int, span: int, chunk_index: int,
               key: bytes, data: bytes) -> int:
    """Checksum of a 32-byte entry, excluding its own CRC field (bytes 4..7).

    Covers the four leading header bytes, the 16-byte key field, and the
    8-byte data field, in on-flash order.
    """
    prefix = bytes((ns_index, entry_type, span, chunk_index))
    return _crc32(prefix + key + data)
|
||||||
|
|
||||||
|
|
||||||
|
def _pack_entry(ns_index: int, entry_type: int, span: int, chunk_index: int,
                key: str, data: bytes) -> bytes:
    """Serialize one 32-byte NVS entry slot.

    Layout: ns_index, type, span, chunk_index (1 byte each), uint32 CRC,
    16-byte null-padded key, 8-byte data field padded with erased-flash 0xFF.

    Returns exactly 32 bytes.
    """
    # Key field is null-padded to 16 bytes (max 15 chars + terminator).
    key_bytes = key.encode("ascii").ljust(16, b"\x00")[:16]
    # Data field is fixed at 8 bytes; unused bytes keep the erased value 0xFF.
    data_bytes = data.ljust(8, b"\xFF")[:8]
    crc = _entry_crc(ns_index, entry_type, span, chunk_index, key_bytes, data_bytes)
    # "<" forces little-endian, packed layout. The previous native-order
    # "BBBBI" format would emit a byte-swapped CRC on big-endian hosts,
    # producing a partition the ESP32 rejects.
    return struct.pack("<BBBBI", ns_index, entry_type, span, chunk_index, crc) + key_bytes + data_bytes
|
||||||
|
|
||||||
|
|
||||||
|
def _bitmap_set_written(bitmap: bytearray, slot_index: int) -> None:
    """Flip one slot's 2-bit state in the entry bitmap from empty (11) to written (10).

    Mutates `bitmap` in place. Bit pairs are packed LSB-first within each byte.
    """
    shift = (slot_index * 2) % 8
    byte_idx = (slot_index * 2) // 8
    mask = 0b11 << shift
    # Zero the slot's two bits, then write the "written" pattern in their place.
    bitmap[byte_idx] = (bitmap[byte_idx] & ~mask) | (ENTRY_STATE_WRITTEN << shift)
|
||||||
|
|
||||||
|
|
||||||
|
def _build_namespace_entry(ns_name: str, ns_index: int) -> Tuple[bytes, int]:
    """Emit the namespace-declaration entry that binds `ns_index` to `ns_name`.

    Returns (entry_bytes, slots_consumed); a namespace declaration always
    occupies exactly one slot.
    """
    # Payload: the assigned namespace id as uint8; rest of the 8-byte data
    # field stays in the erased-flash state.
    payload = struct.pack("<B", ns_index) + b"\xFF" * 7
    entry = _pack_entry(
        ns_index=0,  # namespace declarations live in the reserved namespace 0
        entry_type=ENTRY_TYPE_NAMESPACE,
        span=1,
        chunk_index=0xFF,  # 0xFF marks a non-blob entry
        key=ns_name,
        data=payload,
    )
    return entry, 1
|
||||||
|
|
||||||
|
|
||||||
|
def _build_string_entry(ns_index: int, key: str, value: str) -> Tuple[bytes, int]:
    """Emit a string entry: one header slot followed by 32-byte data slots.

    Returns (entry_bytes, slots_consumed) where slots = 1 header slot plus
    one data slot per started 32-byte chunk of the null-terminated string.
    """
    raw = value.encode("utf-8") + b"\x00"  # strings are stored null-terminated
    size = len(raw)

    # Data is stored in whole 32-byte slots; the tail keeps erased-flash 0xFF.
    n_data_slots = -(-size // 32)  # ceil division
    padded = raw.ljust(n_data_slots * 32, b"\xFF")

    total_span = 1 + n_data_slots

    # Header data field: uint16 length, uint16 reserved (0xFFFF),
    # uint32 CRC of the raw (unpadded) string bytes.
    header_field = struct.pack("<HHI", size, 0xFFFF, _crc32(raw))

    header = _pack_entry(
        ns_index=ns_index,
        entry_type=ENTRY_TYPE_STRING,
        span=total_span,
        chunk_index=0xFF,  # strings are not chunked blobs
        key=key,
        data=header_field,
    )

    # Header slot first, then the inline data slots.
    return header + padded, total_span
|
||||||
|
|
||||||
|
|
||||||
|
def _build_page(entries: List[bytes], slot_counts: List[int], seq: int = 0) -> bytes:
    """Assemble a full 4096-byte NVS page from pre-packed entries.

    entries: packed entry byte strings (header slot plus any data slots each).
    slot_counts: slots consumed by each corresponding entry (its span).
    seq: page sequence number written into the header.

    Returns exactly NVS_PAGE_SIZE bytes: 32-byte header, 32-byte state
    bitmap, then the entry storage area.
    """
    # Entry storage starts as erased flash (all 0xFF). The original filled a
    # zeroed bytearray in a second step; initialize it correctly in one.
    storage = bytearray(b"\xFF" * (NVS_ENTRY_COUNT * NVS_ENTRY_SIZE))
    bitmap = bytearray(b"\xFF" * 32)  # every slot starts in the empty (11) state

    slot = 0
    for entry_bytes, span in zip(entries, slot_counts):
        offset = slot * NVS_ENTRY_SIZE
        storage[offset:offset + len(entry_bytes)] = entry_bytes
        # Mark every slot the entry occupies (header + data slots) as written.
        for s in range(span):
            _bitmap_set_written(bitmap, slot + s)
        slot += span

    # 32-byte page header. (A first, malformed struct.pack("<IIBI19sI")
    # construction — 36 bytes, then discarded — existed here as dead code
    # and has been removed.)
    header_crc = _page_header_crc(seq, NVS_PAGE_VERSION)
    header = (
        struct.pack("<I", NVS_PAGE_STATE_ACTIVE)
        + struct.pack("<I", seq)
        + struct.pack("<B", NVS_PAGE_VERSION)
        + b"\xFF" * 19
        + struct.pack("<I", header_crc)
    )
    assert len(header) == 32, f"Header size mismatch: {len(header)}"

    page = header + bytes(bitmap) + bytes(storage)
    assert len(page) == NVS_PAGE_SIZE, f"Page size mismatch: {len(page)}"
    return page
|
||||||
|
|
||||||
|
|
||||||
|
def generate(serial_number: str, hw_family: str, hw_revision: str, legacy: bool = False) -> bytes:
    """Build the 0x5000-byte NVS partition holding a Vesper device identity.

    serial_number: full SN string e.g. 'BSVSPR-26C13X-STD01R-X7KQA'
    hw_family: board family e.g. 'vesper-standard', 'vesper-plus'
    hw_revision: hardware revision string e.g. '1.0'
    legacy: True writes the old key names expected by pre-new-schema
        firmware (device_uid, hw_type, hw_version); False (default) writes
        the current names (serial, hw_family, hw_revision).

    Returns raw bytes ready to flash at 0x9000.
    """
    ns_index = 1  # the single namespace used, "device_id"

    # Pick the key set the target firmware generation understands.
    if legacy:
        sn_key, fam_key, rev_key = "device_uid", "hw_type", "hw_version"
    else:
        sn_key, fam_key, rev_key = "serial", "hw_family", "hw_revision"

    ns_entry, ns_span = _build_namespace_entry("device_id", ns_index)
    sn_entry, sn_span = _build_string_entry(ns_index, sn_key, serial_number)
    fam_entry, fam_span = _build_string_entry(ns_index, fam_key, hw_family.lower())
    rev_entry, rev_span = _build_string_entry(ns_index, rev_key, hw_revision)

    page0 = _build_page(
        [ns_entry, sn_entry, fam_entry, rev_entry],
        [ns_span, sn_span, fam_span, rev_span],
        seq=0,
    )

    # Every page after the first stays as erased flash (all 0xFF).
    blank = b"\xFF" * NVS_PAGE_SIZE
    n_blank = NVS_PARTITION_SIZE // NVS_PAGE_SIZE - 1
    return page0 + blank * n_blank
|
||||||
78
backend/utils/serial_number.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import random
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
MONTH_CODES = "ABCDEFGHIJKL"
|
||||||
|
SAFE_CHARS = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789" # No 0, O, 1, I — avoids label confusion
|
||||||
|
|
||||||
|
# Family segment (chars 3-6 of segment 1, after "BS")
|
||||||
|
BOARD_FAMILY_CODES = {
|
||||||
|
"vesper": "VSPR",
|
||||||
|
"vesper_plus": "VSPR",
|
||||||
|
"vesper_pro": "VSPR",
|
||||||
|
"agnus": "AGNS",
|
||||||
|
"agnus_mini": "AGNS",
|
||||||
|
"chronos": "CRNS",
|
||||||
|
"chronos_pro": "CRNS",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Variant segment (first 3 chars of segment 3)
|
||||||
|
BOARD_VARIANT_CODES = {
|
||||||
|
"vesper": "STD",
|
||||||
|
"vesper_plus": "PLS",
|
||||||
|
"vesper_pro": "PRO",
|
||||||
|
"agnus": "STD",
|
||||||
|
"agnus_mini": "MIN",
|
||||||
|
"chronos": "STD",
|
||||||
|
"chronos_pro": "PRO",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _version_suffix(board_version: str) -> str:
|
||||||
|
"""Convert version string to 3-char suffix.
|
||||||
|
|
||||||
|
Rules:
|
||||||
|
- Strip the dot: "2.3" → "23", "10.2" → "102"
|
||||||
|
- If result is 2 digits, append "R": "23" → "23R"
|
||||||
|
- If result is already 3 digits, use as-is: "102" → "102"
|
||||||
|
"""
|
||||||
|
digits = board_version.replace(".", "")
|
||||||
|
if len(digits) >= 3:
|
||||||
|
return digits[:3]
|
||||||
|
return digits.ljust(2, "0") + "R"
|
||||||
|
|
||||||
|
|
||||||
|
def generate_serial(board_type: str, board_version: str) -> str:
    """Generate a serial number like BSVSPR-26C13X-STD23R-X7KQAM.

    Format: BSffff-YYMDDr-VVVvvv-XXXXXX
        BS     = Bell Systems (static)
        ffff   = 4-char family code (VSPR, AGNS, CRNS; UNKN if unmapped)
        YY     = 2-digit UTC year
        M      = month code A-L
        DD     = 2-digit day
        r      = random filler char
        VVV    = 3-char variant (STD, PLS, PRO, MIN; UNK if unmapped)
        vvv    = 3-char version suffix (e.g. 23R, 102)
        XXXXXX = 6-char random suffix

    board_type: enum value e.g. 'vesper', 'vesper_plus', 'vesper_pro'
    board_version: version string e.g. '2.3', '10.2'

    NOTE: the random parts use `random`, not `secrets` — serials are
    identifiers, not security tokens.
    """
    from datetime import timezone  # local import keeps module imports untouched

    key = board_type.lower()
    family = BOARD_FAMILY_CODES.get(key, "UNKN")
    variant = BOARD_VARIANT_CODES.get(key, "UNK")
    ver = _version_suffix(board_version)

    # datetime.utcnow() is deprecated since Python 3.12; use an aware UTC
    # timestamp instead (same %y/%d rendering, same month index).
    now = datetime.now(timezone.utc)
    year = now.strftime("%y")
    month = MONTH_CODES[now.month - 1]
    day = now.strftime("%d")
    filler = random.choice(SAFE_CHARS)
    suffix = "".join(random.choices(SAFE_CHARS, k=6))

    seg1 = f"BS{family}"                   # e.g. BSVSPR
    seg2 = f"{year}{month}{day}{filler}"   # e.g. 26C13X
    seg3 = f"{variant}{ver}"               # e.g. PRO23R
    seg4 = suffix                          # e.g. X9K4M2

    return f"{seg1}-{seg2}-{seg3}-{seg4}"
|
||||||
0
data/.gitkeep
Normal file
11
deploy-host.sh
Executable file
@@ -0,0 +1,11 @@
|
|||||||
|
#!/bin/sh
# Auto-deploy script run on the host: hard-syncs the checkout to origin/main
# and rebuilds/restarts the Docker stack. `set -e` aborts on the first
# failing command so a broken fetch never triggers a rebuild.
set -e

# Path of the project checkout on the deployment host.
PROJECT=/home/bellsystems/bellsystems-cp

echo "Deploy started at $(date)"
cd "$PROJECT"
# Discard any local changes so the working tree matches the remote exactly.
git fetch origin main
git reset --hard origin/main
# Rebuild images and restart containers; merge stderr into the deploy log.
docker compose up -d --build 2>&1
echo "Deploy finished at $(date)"
|
||||||
@@ -5,6 +5,14 @@ services:
|
|||||||
env_file: .env
|
env_file: .env
|
||||||
volumes:
|
volumes:
|
||||||
- ./backend:/app
|
- ./backend:/app
|
||||||
|
# Persistent data - lives outside the container
|
||||||
|
- ./data:/app/data
|
||||||
|
- ./data/built_melodies:/app/storage/built_melodies
|
||||||
|
- ./data/firmware:/app/storage/firmware
|
||||||
|
- ./data/flash_assets:/app/storage/flash_assets
|
||||||
|
- ./data/firebase-service-account.json:/app/firebase-service-account.json:ro
|
||||||
|
# Auto-deploy: project root so container can write the trigger file
|
||||||
|
- /home/bellsystems/bellsystems-cp:/home/bellsystems/bellsystems-cp
|
||||||
ports:
|
ports:
|
||||||
- "8000:8000"
|
- "8000:8000"
|
||||||
depends_on: []
|
depends_on: []
|
||||||
@@ -22,7 +30,7 @@ services:
|
|||||||
image: nginx:alpine
|
image: nginx:alpine
|
||||||
container_name: bellsystems-nginx
|
container_name: bellsystems-nginx
|
||||||
ports:
|
ports:
|
||||||
- "80:80"
|
- "${NGINX_PORT:-80}:80"
|
||||||
volumes:
|
volumes:
|
||||||
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
|
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
|
||||||
depends_on:
|
depends_on:
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<title>BellSystems Admin</title>
|
<title>BellSystems Admin</title>
|
||||||
</head>
|
</head>
|
||||||
|
|||||||
734
frontend/package-lock.json
generated
@@ -8,7 +8,9 @@
|
|||||||
"name": "frontend",
|
"name": "frontend",
|
||||||
"version": "0.0.0",
|
"version": "0.0.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"esptool-js": "^0.5.7",
|
||||||
"leaflet": "^1.9.4",
|
"leaflet": "^1.9.4",
|
||||||
|
"qrcode": "^1.5.4",
|
||||||
"react": "^19.2.0",
|
"react": "^19.2.0",
|
||||||
"react-dom": "^19.2.0",
|
"react-dom": "^19.2.0",
|
||||||
"react-leaflet": "^5.0.0",
|
"react-leaflet": "^5.0.0",
|
||||||
@@ -25,7 +27,8 @@
|
|||||||
"eslint-plugin-react-refresh": "^0.4.24",
|
"eslint-plugin-react-refresh": "^0.4.24",
|
||||||
"globals": "^16.5.0",
|
"globals": "^16.5.0",
|
||||||
"tailwindcss": "^4.1.18",
|
"tailwindcss": "^4.1.18",
|
||||||
"vite": "^7.3.1"
|
"vite": "^7.3.1",
|
||||||
|
"vite-plugin-svgr": "^4.5.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@babel/code-frame": {
|
"node_modules/@babel/code-frame": {
|
||||||
@@ -1029,6 +1032,29 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@rollup/pluginutils": {
|
||||||
|
"version": "5.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz",
|
||||||
|
"integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/estree": "^1.0.0",
|
||||||
|
"estree-walker": "^2.0.2",
|
||||||
|
"picomatch": "^4.0.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"rollup": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@rollup/rollup-android-arm-eabi": {
|
"node_modules/@rollup/rollup-android-arm-eabi": {
|
||||||
"version": "4.57.1",
|
"version": "4.57.1",
|
||||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz",
|
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz",
|
||||||
@@ -1379,6 +1405,231 @@
|
|||||||
"win32"
|
"win32"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-add-jsx-attribute": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-remove-jsx-attribute": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-svg-dynamic-title": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-svg-em-dimensions": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-transform-react-native-svg": {
|
||||||
|
"version": "8.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz",
|
||||||
|
"integrity": "sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-plugin-transform-svg-component": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/babel-preset": {
|
||||||
|
"version": "8.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-8.1.0.tgz",
|
||||||
|
"integrity": "sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@svgr/babel-plugin-add-jsx-attribute": "8.0.0",
|
||||||
|
"@svgr/babel-plugin-remove-jsx-attribute": "8.0.0",
|
||||||
|
"@svgr/babel-plugin-remove-jsx-empty-expression": "8.0.0",
|
||||||
|
"@svgr/babel-plugin-replace-jsx-attribute-value": "8.0.0",
|
||||||
|
"@svgr/babel-plugin-svg-dynamic-title": "8.0.0",
|
||||||
|
"@svgr/babel-plugin-svg-em-dimensions": "8.0.0",
|
||||||
|
"@svgr/babel-plugin-transform-react-native-svg": "8.1.0",
|
||||||
|
"@svgr/babel-plugin-transform-svg-component": "8.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@babel/core": "^7.0.0-0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/core": {
|
||||||
|
"version": "8.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/core/-/core-8.1.0.tgz",
|
||||||
|
"integrity": "sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@babel/core": "^7.21.3",
|
||||||
|
"@svgr/babel-preset": "8.1.0",
|
||||||
|
"camelcase": "^6.2.0",
|
||||||
|
"cosmiconfig": "^8.1.3",
|
||||||
|
"snake-case": "^3.0.4"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/hast-util-to-babel-ast": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@babel/types": "^7.21.3",
|
||||||
|
"entities": "^4.4.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@svgr/plugin-jsx": {
|
||||||
|
"version": "8.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz",
|
||||||
|
"integrity": "sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@babel/core": "^7.21.3",
|
||||||
|
"@svgr/babel-preset": "8.1.0",
|
||||||
|
"@svgr/hast-util-to-babel-ast": "8.0.0",
|
||||||
|
"svg-parser": "^2.0.4"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "github",
|
||||||
|
"url": "https://github.com/sponsors/gregberge"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@svgr/core": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@tailwindcss/node": {
|
"node_modules/@tailwindcss/node": {
|
||||||
"version": "4.1.18",
|
"version": "4.1.18",
|
||||||
"resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz",
|
"resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz",
|
||||||
@@ -1791,11 +2042,19 @@
|
|||||||
"url": "https://github.com/sponsors/epoberezkin"
|
"url": "https://github.com/sponsors/epoberezkin"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/ansi-regex": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
||||||
|
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/ansi-styles": {
|
"node_modules/ansi-styles": {
|
||||||
"version": "4.3.0",
|
"version": "4.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
||||||
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-convert": "^2.0.1"
|
"color-convert": "^2.0.1"
|
||||||
@@ -1814,6 +2073,12 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "Python-2.0"
|
"license": "Python-2.0"
|
||||||
},
|
},
|
||||||
|
"node_modules/atob-lite": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/atob-lite/-/atob-lite-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-LEeSAWeh2Gfa2FtlQE1shxQ8zi5F9GHarrGKz08TMdODD5T4eH6BMsvtnhbWZ+XQn+Gb6om/917ucvRu7l7ukw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/balanced-match": {
|
"node_modules/balanced-match": {
|
||||||
"version": "1.0.2",
|
"version": "1.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||||
@@ -1886,6 +2151,19 @@
|
|||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/camelcase": {
|
||||||
|
"version": "6.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
|
||||||
|
"integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/caniuse-lite": {
|
"node_modules/caniuse-lite": {
|
||||||
"version": "1.0.30001770",
|
"version": "1.0.30001770",
|
||||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001770.tgz",
|
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001770.tgz",
|
||||||
@@ -1924,11 +2202,21 @@
|
|||||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/cliui": {
|
||||||
|
"version": "6.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
|
||||||
|
"integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"string-width": "^4.2.0",
|
||||||
|
"strip-ansi": "^6.0.0",
|
||||||
|
"wrap-ansi": "^6.2.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/color-convert": {
|
"node_modules/color-convert": {
|
||||||
"version": "2.0.1",
|
"version": "2.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"color-name": "~1.1.4"
|
"color-name": "~1.1.4"
|
||||||
@@ -1941,7 +2229,6 @@
|
|||||||
"version": "1.1.4",
|
"version": "1.1.4",
|
||||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/concat-map": {
|
"node_modules/concat-map": {
|
||||||
@@ -1971,6 +2258,33 @@
|
|||||||
"url": "https://opencollective.com/express"
|
"url": "https://opencollective.com/express"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/cosmiconfig": {
|
||||||
|
"version": "8.3.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz",
|
||||||
|
"integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"import-fresh": "^3.3.0",
|
||||||
|
"js-yaml": "^4.1.0",
|
||||||
|
"parse-json": "^5.2.0",
|
||||||
|
"path-type": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/d-fischer"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"typescript": ">=4.9.5"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"typescript": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/cross-spawn": {
|
"node_modules/cross-spawn": {
|
||||||
"version": "7.0.6",
|
"version": "7.0.6",
|
||||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
||||||
@@ -2011,6 +2325,15 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/decamelize": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/deep-is": {
|
"node_modules/deep-is": {
|
||||||
"version": "0.1.4",
|
"version": "0.1.4",
|
||||||
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
|
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
|
||||||
@@ -2028,6 +2351,23 @@
|
|||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/dijkstrajs": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/dijkstrajs/-/dijkstrajs-1.0.3.tgz",
|
||||||
|
"integrity": "sha512-qiSlmBq9+BCdCA/L46dw8Uy93mloxsPSbwnm5yrKn2vMPiy8KyAskTF6zuV/j5BMsmOGZDPs7KjU+mjb670kfA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/dot-case": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz",
|
||||||
|
"integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"no-case": "^3.0.4",
|
||||||
|
"tslib": "^2.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/electron-to-chromium": {
|
"node_modules/electron-to-chromium": {
|
||||||
"version": "1.5.286",
|
"version": "1.5.286",
|
||||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz",
|
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz",
|
||||||
@@ -2035,6 +2375,12 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
|
"node_modules/emoji-regex": {
|
||||||
|
"version": "8.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||||
|
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/enhanced-resolve": {
|
"node_modules/enhanced-resolve": {
|
||||||
"version": "5.19.0",
|
"version": "5.19.0",
|
||||||
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz",
|
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz",
|
||||||
@@ -2049,6 +2395,29 @@
|
|||||||
"node": ">=10.13.0"
|
"node": ">=10.13.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/entities": {
|
||||||
|
"version": "4.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
|
||||||
|
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "BSD-2-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/fb55/entities?sponsor=1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/error-ex": {
|
||||||
|
"version": "1.3.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz",
|
||||||
|
"integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"is-arrayish": "^0.2.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/esbuild": {
|
"node_modules/esbuild": {
|
||||||
"version": "0.27.3",
|
"version": "0.27.3",
|
||||||
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz",
|
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz",
|
||||||
@@ -2252,6 +2621,17 @@
|
|||||||
"url": "https://opencollective.com/eslint"
|
"url": "https://opencollective.com/eslint"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/esptool-js": {
|
||||||
|
"version": "0.5.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/esptool-js/-/esptool-js-0.5.7.tgz",
|
||||||
|
"integrity": "sha512-k3pkXU9OTySCd58OUDjuJWNnFjM+QpPWAghxyWPm3zNfaLiP4ex2jNd7Rj0jWPu3/fgvwau236tetsTZrh4x5g==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"atob-lite": "^2.0.0",
|
||||||
|
"pako": "^2.1.0",
|
||||||
|
"tslib": "^2.4.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/esquery": {
|
"node_modules/esquery": {
|
||||||
"version": "1.7.0",
|
"version": "1.7.0",
|
||||||
"resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz",
|
"resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz",
|
||||||
@@ -2288,6 +2668,13 @@
|
|||||||
"node": ">=4.0"
|
"node": ">=4.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/estree-walker": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/esutils": {
|
"node_modules/esutils": {
|
||||||
"version": "2.0.3",
|
"version": "2.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
|
||||||
@@ -2413,6 +2800,15 @@
|
|||||||
"node": ">=6.9.0"
|
"node": ">=6.9.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/get-caller-file": {
|
||||||
|
"version": "2.0.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
|
||||||
|
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
|
||||||
|
"license": "ISC",
|
||||||
|
"engines": {
|
||||||
|
"node": "6.* || 8.* || >= 10.*"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/glob-parent": {
|
"node_modules/glob-parent": {
|
||||||
"version": "6.0.2",
|
"version": "6.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
|
||||||
@@ -2510,6 +2906,13 @@
|
|||||||
"node": ">=0.8.19"
|
"node": ">=0.8.19"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/is-arrayish": {
|
||||||
|
"version": "0.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
|
||||||
|
"integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/is-extglob": {
|
"node_modules/is-extglob": {
|
||||||
"version": "2.1.1",
|
"version": "2.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
|
||||||
@@ -2520,6 +2923,15 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/is-fullwidth-code-point": {
|
||||||
|
"version": "3.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
|
||||||
|
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/is-glob": {
|
"node_modules/is-glob": {
|
||||||
"version": "4.0.3",
|
"version": "4.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
||||||
@@ -2590,6 +3002,13 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/json-parse-even-better-errors": {
|
||||||
|
"version": "2.3.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
|
||||||
|
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/json-schema-traverse": {
|
"node_modules/json-schema-traverse": {
|
||||||
"version": "0.4.1",
|
"version": "0.4.1",
|
||||||
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
|
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
|
||||||
@@ -2908,6 +3327,13 @@
|
|||||||
"url": "https://opencollective.com/parcel"
|
"url": "https://opencollective.com/parcel"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/lines-and-columns": {
|
||||||
|
"version": "1.2.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
|
||||||
|
"integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/locate-path": {
|
"node_modules/locate-path": {
|
||||||
"version": "6.0.0",
|
"version": "6.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
|
||||||
@@ -2931,6 +3357,16 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/lower-case": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"tslib": "^2.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/lru-cache": {
|
"node_modules/lru-cache": {
|
||||||
"version": "5.1.1",
|
"version": "5.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
|
||||||
@@ -2997,6 +3433,17 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/no-case": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz",
|
||||||
|
"integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"lower-case": "^2.0.2",
|
||||||
|
"tslib": "^2.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/node-releases": {
|
"node_modules/node-releases": {
|
||||||
"version": "2.0.27",
|
"version": "2.0.27",
|
||||||
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz",
|
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz",
|
||||||
@@ -3054,6 +3501,21 @@
|
|||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/p-try": {
|
||||||
|
"version": "2.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
|
||||||
|
"integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/pako": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==",
|
||||||
|
"license": "(MIT AND Zlib)"
|
||||||
|
},
|
||||||
"node_modules/parent-module": {
|
"node_modules/parent-module": {
|
||||||
"version": "1.0.1",
|
"version": "1.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
|
||||||
@@ -3067,11 +3529,29 @@
|
|||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/parse-json": {
|
||||||
|
"version": "5.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
|
||||||
|
"integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@babel/code-frame": "^7.0.0",
|
||||||
|
"error-ex": "^1.3.1",
|
||||||
|
"json-parse-even-better-errors": "^2.3.0",
|
||||||
|
"lines-and-columns": "^1.1.6"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/path-exists": {
|
"node_modules/path-exists": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
|
||||||
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
|
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
@@ -3087,6 +3567,16 @@
|
|||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/path-type": {
|
||||||
|
"version": "4.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
|
||||||
|
"integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/picocolors": {
|
"node_modules/picocolors": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||||
@@ -3107,6 +3597,15 @@
|
|||||||
"url": "https://github.com/sponsors/jonschlinkert"
|
"url": "https://github.com/sponsors/jonschlinkert"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/pngjs": {
|
||||||
|
"version": "5.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/pngjs/-/pngjs-5.0.0.tgz",
|
||||||
|
"integrity": "sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10.13.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/postcss": {
|
"node_modules/postcss": {
|
||||||
"version": "8.5.6",
|
"version": "8.5.6",
|
||||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
|
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
|
||||||
@@ -3156,6 +3655,23 @@
|
|||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/qrcode": {
|
||||||
|
"version": "1.5.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/qrcode/-/qrcode-1.5.4.tgz",
|
||||||
|
"integrity": "sha512-1ca71Zgiu6ORjHqFBDpnSMTR2ReToX4l1Au1VFLyVeBTFavzQnv5JxMFr3ukHVKpSrSA2MCk0lNJSykjUfz7Zg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"dijkstrajs": "^1.0.1",
|
||||||
|
"pngjs": "^5.0.0",
|
||||||
|
"yargs": "^15.3.1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"qrcode": "bin/qrcode"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10.13.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/react": {
|
"node_modules/react": {
|
||||||
"version": "19.2.4",
|
"version": "19.2.4",
|
||||||
"resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz",
|
"resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz",
|
||||||
@@ -3239,6 +3755,21 @@
|
|||||||
"react-dom": ">=18"
|
"react-dom": ">=18"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/require-directory": {
|
||||||
|
"version": "2.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
||||||
|
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/require-main-filename": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
"node_modules/resolve-from": {
|
"node_modules/resolve-from": {
|
||||||
"version": "4.0.0",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
|
||||||
@@ -3310,6 +3841,12 @@
|
|||||||
"semver": "bin/semver.js"
|
"semver": "bin/semver.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/set-blocking": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
"node_modules/set-cookie-parser": {
|
"node_modules/set-cookie-parser": {
|
||||||
"version": "2.7.2",
|
"version": "2.7.2",
|
||||||
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz",
|
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz",
|
||||||
@@ -3339,6 +3876,17 @@
|
|||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/snake-case": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz",
|
||||||
|
"integrity": "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"dot-case": "^3.0.4",
|
||||||
|
"tslib": "^2.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/source-map-js": {
|
"node_modules/source-map-js": {
|
||||||
"version": "1.2.1",
|
"version": "1.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
||||||
@@ -3349,6 +3897,32 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/string-width": {
|
||||||
|
"version": "4.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
||||||
|
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"emoji-regex": "^8.0.0",
|
||||||
|
"is-fullwidth-code-point": "^3.0.0",
|
||||||
|
"strip-ansi": "^6.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/strip-ansi": {
|
||||||
|
"version": "6.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
|
||||||
|
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"ansi-regex": "^5.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/strip-json-comments": {
|
"node_modules/strip-json-comments": {
|
||||||
"version": "3.1.1",
|
"version": "3.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
|
||||||
@@ -3375,6 +3949,13 @@
|
|||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/svg-parser": {
|
||||||
|
"version": "2.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz",
|
||||||
|
"integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/tailwindcss": {
|
"node_modules/tailwindcss": {
|
||||||
"version": "4.1.18",
|
"version": "4.1.18",
|
||||||
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz",
|
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz",
|
||||||
@@ -3413,6 +3994,12 @@
|
|||||||
"url": "https://github.com/sponsors/SuperchupuDev"
|
"url": "https://github.com/sponsors/SuperchupuDev"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/tslib": {
|
||||||
|
"version": "2.8.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||||
|
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
|
||||||
|
"license": "0BSD"
|
||||||
|
},
|
||||||
"node_modules/type-check": {
|
"node_modules/type-check": {
|
||||||
"version": "0.4.0",
|
"version": "0.4.0",
|
||||||
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
|
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
|
||||||
@@ -3542,6 +4129,21 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/vite-plugin-svgr": {
|
||||||
|
"version": "4.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/vite-plugin-svgr/-/vite-plugin-svgr-4.5.0.tgz",
|
||||||
|
"integrity": "sha512-W+uoSpmVkSmNOGPSsDCWVW/DDAyv+9fap9AZXBvWiQqrboJ08j2vh0tFxTD/LjwqwAd3yYSVJgm54S/1GhbdnA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@rollup/pluginutils": "^5.2.0",
|
||||||
|
"@svgr/core": "^8.1.0",
|
||||||
|
"@svgr/plugin-jsx": "^8.1.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"vite": ">=2.6.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/which": {
|
"node_modules/which": {
|
||||||
"version": "2.0.2",
|
"version": "2.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
|
||||||
@@ -3558,6 +4160,12 @@
|
|||||||
"node": ">= 8"
|
"node": ">= 8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/which-module": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
"node_modules/word-wrap": {
|
"node_modules/word-wrap": {
|
||||||
"version": "1.2.5",
|
"version": "1.2.5",
|
||||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
|
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
|
||||||
@@ -3568,6 +4176,26 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/wrap-ansi": {
|
||||||
|
"version": "6.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
|
||||||
|
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"ansi-styles": "^4.0.0",
|
||||||
|
"string-width": "^4.1.0",
|
||||||
|
"strip-ansi": "^6.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/y18n": {
|
||||||
|
"version": "4.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
|
||||||
|
"integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
"node_modules/yallist": {
|
"node_modules/yallist": {
|
||||||
"version": "3.1.1",
|
"version": "3.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
|
||||||
@@ -3575,6 +4203,102 @@
|
|||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
|
"node_modules/yargs": {
|
||||||
|
"version": "15.4.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
|
||||||
|
"integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"cliui": "^6.0.0",
|
||||||
|
"decamelize": "^1.2.0",
|
||||||
|
"find-up": "^4.1.0",
|
||||||
|
"get-caller-file": "^2.0.1",
|
||||||
|
"require-directory": "^2.1.1",
|
||||||
|
"require-main-filename": "^2.0.0",
|
||||||
|
"set-blocking": "^2.0.0",
|
||||||
|
"string-width": "^4.2.0",
|
||||||
|
"which-module": "^2.0.0",
|
||||||
|
"y18n": "^4.0.0",
|
||||||
|
"yargs-parser": "^18.1.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/yargs-parser": {
|
||||||
|
"version": "18.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
|
||||||
|
"integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"camelcase": "^5.0.0",
|
||||||
|
"decamelize": "^1.2.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/yargs-parser/node_modules/camelcase": {
|
||||||
|
"version": "5.3.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
|
||||||
|
"integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/yargs/node_modules/find-up": {
|
||||||
|
"version": "4.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
|
||||||
|
"integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"locate-path": "^5.0.0",
|
||||||
|
"path-exists": "^4.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/yargs/node_modules/locate-path": {
|
||||||
|
"version": "5.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
|
||||||
|
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"p-locate": "^4.1.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/yargs/node_modules/p-limit": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"p-try": "^2.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/yargs/node_modules/p-locate": {
|
||||||
|
"version": "4.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
|
||||||
|
"integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"p-limit": "^2.2.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/yocto-queue": {
|
"node_modules/yocto-queue": {
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
|
||||||
|
|||||||
@@ -10,7 +10,9 @@
|
|||||||
"preview": "vite preview"
|
"preview": "vite preview"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"esptool-js": "^0.5.7",
|
||||||
"leaflet": "^1.9.4",
|
"leaflet": "^1.9.4",
|
||||||
|
"qrcode": "^1.5.4",
|
||||||
"react": "^19.2.0",
|
"react": "^19.2.0",
|
||||||
"react-dom": "^19.2.0",
|
"react-dom": "^19.2.0",
|
||||||
"react-leaflet": "^5.0.0",
|
"react-leaflet": "^5.0.0",
|
||||||
@@ -27,6 +29,7 @@
|
|||||||
"eslint-plugin-react-refresh": "^0.4.24",
|
"eslint-plugin-react-refresh": "^0.4.24",
|
||||||
"globals": "^16.5.0",
|
"globals": "^16.5.0",
|
||||||
"tailwindcss": "^4.1.18",
|
"tailwindcss": "^4.1.18",
|
||||||
"vite": "^7.3.1"
|
"vite": "^7.3.1",
|
||||||
|
"vite-plugin-svgr": "^4.5.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
BIN
frontend/public/devices/Strikers/striker_size_1.png
Normal file
|
After Width: | Height: | Size: 9.1 KiB |
BIN
frontend/public/devices/Strikers/striker_size_2.png
Normal file
|
After Width: | Height: | Size: 11 KiB |