async def update_builtin_flag(melody_id: str, is_builtin: bool) -> None:
    """Persist only the is_builtin flag for one archetype, bumping updated_at."""
    conn = await get_db()
    flag = int(is_builtin)  # stored as 0/1 in the INTEGER column
    await conn.execute(
        """UPDATE built_melodies
           SET is_builtin = ?, updated_at = datetime('now')
           WHERE id = ?""",
        (flag, melody_id),
    )
    await conn.commit()
@router.get("/generate-builtin-list")
async def generate_builtin_list(
    _user: TokenPayload = Depends(require_permission("melodies", "view")),
):
    """Return a C++ header (PROGMEM arrays) covering every is_builtin archetype.

    NOTE: this route must stay registered before the /{melody_id} route so
    FastAPI does not treat 'generate-builtin-list' as a melody id.
    """
    header_source = await service.generate_builtin_list()
    return PlainTextResponse(content=header_source, media_type="text/plain")
async def toggle_builtin(melody_id: str) -> BuiltMelodyInDB:
    """Flip the is_builtin flag on an archetype and return the refreshed record.

    Raises:
        HTTPException: 404 if no built melody exists for melody_id.
    """
    existing = await db.get_built_melody(melody_id)
    if not existing:
        raise HTTPException(status_code=404, detail=f"Built melody '{melody_id}' not found")
    flipped = not existing.get("is_builtin", False)
    await db.update_builtin_flag(melody_id, flipped)
    return await get_built_melody(melody_id)
async def generate_builtin_list() -> str:
    """Generate a C++ header with PROGMEM arrays for all is_builtin archetypes.

    Returns the header as a single string; the router serves it as text/plain.
    Output layout: file banner, ``#pragma once`` + include, one PROGMEM array
    per built-in archetype, then a commented MELODY_LIBRARY[] template the
    firmware author can paste.
    """
    rows = await db.list_built_melodies()
    builtin_rows = [r for r in rows if r.get("is_builtin")]

    if not builtin_rows:
        return "// No built-in archetypes defined.\n"

    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    parts = [
        "// Auto-generated Built-in Archetype List",
        f"// Generated: {timestamp}",
        f"// Total built-ins: {len(builtin_rows)}",
        "",
        "#pragma once",
        # NOTE(review): this include target looks truncated — confirm the
        # intended header (e.g. <avr/pgmspace.h>) before shipping.
        "#include ",
        "",
    ]

    entry_refs = []
    for row in builtin_rows:
        values = steps_string_to_values(row["steps"])
        # Hoisted: this slug was previously computed twice per row.
        slug = row["name"].lower().replace(" ", "_")
        array_name = f"melody_builtin_{slug}"
        display_name = row["name"].replace("_", " ").title()
        pid = row.get("pid") or f"builtin_{row['name'].lower()}"

        parts.append(f"// {display_name} | PID: {pid} | Steps: {len(values)}")
        # NOTE(review): the MELODY_LIBRARY template below references
        # `melody_builtin_<slug>` but format_melody_array() is given only
        # `<slug>` — if it derives the C array name from its first argument,
        # the referenced symbol will not exist. Confirm its naming scheme.
        parts.append(format_melody_array(slug, values))
        parts.append("")
        entry_refs.append((display_name, pid, array_name, len(values)))

    # Commented MELODY_LIBRARY[] template for the firmware source.
    parts.append("// --- MELODY_LIBRARY entries ---")
    parts.append("// Add these to your firmware's MELODY_LIBRARY[] array:")
    parts.append("// {")
    for display_name, pid, array_name, step_count in entry_refs:
        parts.append(f'// {{ "{display_name}", "{pid}", {array_name}, {step_count} }},')
    parts.append("// };")

    return "\n".join(parts)
async def _flag_melodies_outdated(melody_ids: List[str], outdated: bool) -> None:
    """Set or clear the outdated_archetype flag on a list of Firestore melody IDs.

    Best-effort: updates the SQLite draft store first, then mirrors the flag to
    Firestore for melodies whose status is "published". A failure on one melody
    is logged and does not stop the rest. Module imports are deferred to call
    time to avoid circular imports.
    """
    if not melody_ids:
        return

    try:
        from melodies import database as melody_db
        from shared.firebase import get_db as get_firestore
    except ImportError:
        logger.warning("Could not import melody/firebase modules — skipping outdated flag update")
        return

    firestore_db = get_firestore()

    for mid in melody_ids:
        try:
            record = await melody_db.get_melody(mid)
            if not record:
                continue

            payload = record["data"]
            # Copy-then-assign so a shared "information" dict is never mutated.
            information = dict(payload.get("information", {}))
            information["outdated_archetype"] = outdated
            payload["information"] = information

            await melody_db.update_melody(mid, payload)

            # Published melodies also carry the flag in Firestore.
            if record.get("status") == "published":
                firestore_db.collection("melodies").document(mid).update(
                    {"information.outdated_archetype": outdated}
                )

            logger.info(f"Set outdated_archetype={outdated} on melody {mid}")
        except Exception as e:
            logger.error(f"Failed to set outdated flag on melody {mid}: {e}")
websocket_url: Optional[str] = None churchAssistantURL: Optional[str] = None staffNotes: Optional[str] = None + hw_family: Optional[str] = None + hw_revision: Optional[str] = None + tags: Optional[List[str]] = None + customer_id: Optional[str] = None + mfg_status: Optional[str] = None class DeviceInDB(DeviceCreate): id: str + # Legacy field — kept for backwards compat; new docs use serial_number device_id: str = "" @@ -157,6 +169,15 @@ class DeviceListResponse(BaseModel): total: int +class DeviceNoteCreate(BaseModel): + content: str + created_by: str = "" + + +class DeviceNoteUpdate(BaseModel): + content: str + + class DeviceUserInfo(BaseModel): """User info resolved from device_users sub-collection or user_list.""" user_id: str = "" diff --git a/backend/devices/router.py b/backend/devices/router.py index d2129ac..040b6a7 100644 --- a/backend/devices/router.py +++ b/backend/devices/router.py @@ -1,17 +1,25 @@ -from fastapi import APIRouter, Depends, Query -from typing import Optional +import uuid +from datetime import datetime +from fastapi import APIRouter, Depends, Query, HTTPException +from typing import Optional, List +from pydantic import BaseModel from auth.models import TokenPayload from auth.dependencies import require_permission from devices.models import ( DeviceCreate, DeviceUpdate, DeviceInDB, DeviceListResponse, DeviceUsersResponse, DeviceUserInfo, + DeviceNoteCreate, DeviceNoteUpdate, ) from devices import service import database as mqtt_db from mqtt.models import DeviceAlertEntry, DeviceAlertsResponse +from shared.firebase import get_db as get_firestore router = APIRouter(prefix="/api/devices", tags=["devices"]) +NOTES_COLLECTION = "notes" +CRM_COLLECTION = "crm_customers" + @router.get("", response_model=DeviceListResponse) async def list_devices( @@ -79,3 +87,375 @@ async def get_device_alerts( """Return the current active alert set for a device. 
@router.post("/{device_id}/notes", status_code=201)
async def create_device_note(
    device_id: str,
    body: DeviceNoteCreate,
    _user: TokenPayload = Depends(require_permission("devices", "edit")),
):
    """Create a new note for a device and echo it back with ISO timestamps."""
    store = get_firestore()
    # NOTE(review): datetime.utcnow() is naive and deprecated since 3.12;
    # datetime.now(timezone.utc) would change the stored values, so it is
    # left as-is in this restyle — confirm before switching.
    timestamp = datetime.utcnow()
    note_id = str(uuid.uuid4())
    payload = {
        "device_id": device_id,
        "content": body.content,
        "created_by": body.created_by or _user.name or "",
        "created_at": timestamp,
        "updated_at": timestamp,
    }
    store.collection(NOTES_COLLECTION).document(note_id).set(payload)
    # Response shape matches the list endpoint: id + ISO-formatted timestamps.
    payload["id"] = note_id
    payload["created_at"] = timestamp.isoformat()
    payload["updated_at"] = timestamp.isoformat()
    return payload
@router.delete("/{device_id}/notes/{note_id}", status_code=204)
async def delete_device_note(
    device_id: str,
    note_id: str,
    _user: TokenPayload = Depends(require_permission("devices", "edit")),
):
    """Delete a device note; 404 if missing or owned by a different device."""
    store = get_firestore()
    note_ref = store.collection(NOTES_COLLECTION).document(note_id)
    snapshot = note_ref.get()
    belongs_to_device = snapshot.exists and snapshot.to_dict().get("device_id") == device_id
    if not belongs_to_device:
        raise HTTPException(status_code=404, detail="Note not found")
    note_ref.delete()
@router.get("/{device_id}/customer-search")
async def search_customers_for_device(
    device_id: str,
    q: str = Query(""),
    _user: TokenPayload = Depends(require_permission("devices", "view")),
):
    """Search customers by name, email, phone, org, or tags, returning top 20 matches."""
    store = get_firestore()
    needle = q.lower().strip()
    matches = []
    # Linear scan of the collection; acceptable while the CRM stays small.
    for snapshot in store.collection(CRM_COLLECTION).stream():
        record = snapshot.to_dict()
        name = record.get("name", "") or ""
        surname = record.get("surname", "") or ""
        email = record.get("email", "") or ""
        organization = record.get("organization", "") or ""
        phone = record.get("phone", "") or ""
        tag_text = " ".join(record.get("tags", []) or [])
        city = (record.get("location") or {}).get("city", "") or ""
        haystack = f"{name} {surname} {email} {organization} {phone} {tag_text} {city}".lower()
        # Empty query matches everything; otherwise substring match.
        if needle and needle not in haystack:
            continue
        matches.append({
            "id": snapshot.id,
            "name": name,
            "surname": surname,
            "email": email,
            "organization": organization,
            "city": city,
        })
        if len(matches) >= 20:
            break
    return {"results": matches}
+ """ + db = get_firestore() + + # Verify device exists + device = service.get_device(device_id) + + # Get customer + customer_ref = db.collection(CRM_COLLECTION).document(body.customer_id) + customer_doc = customer_ref.get() + if not customer_doc.exists: + raise HTTPException(status_code=404, detail="Customer not found") + customer_data = customer_doc.to_dict() + customer_email = customer_data.get("email", "") + + # Update device: owner email + customer_id + device_ref = db.collection("devices").document(device_id) + device_ref.update({"owner": customer_email, "customer_id": body.customer_id}) + + # Add to customer owned_items (avoid duplicates) + owned_items = customer_data.get("owned_items", []) or [] + already_assigned = any( + item.get("type") == "console_device" and item.get("console_device", {}).get("device_id") == device_id + for item in owned_items + ) + if not already_assigned: + owned_items.append({ + "type": "console_device", + "console_device": { + "device_id": device_id, + "label": body.label or device.device_name or device_id, + } + }) + customer_ref.update({"owned_items": owned_items}) + + return {"status": "assigned", "device_id": device_id, "customer_id": body.customer_id} + + +@router.delete("/{device_id}/assign-customer", status_code=204) +async def unassign_device_from_customer( + device_id: str, + customer_id: str = Query(...), + _user: TokenPayload = Depends(require_permission("devices", "edit")), +): + """Remove device assignment from a customer.""" + db = get_firestore() + + # Clear customer_id on device + device_ref = db.collection("devices").document(device_id) + device_ref.update({"customer_id": ""}) + + # Remove from customer owned_items + customer_ref = db.collection(CRM_COLLECTION).document(customer_id) + customer_doc = customer_ref.get() + if customer_doc.exists: + customer_data = customer_doc.to_dict() + owned_items = [ + item for item in (customer_data.get("owned_items") or []) + if not (item.get("type") == "console_device" and 
@router.get("/{device_id}/customer")
async def get_device_customer(
    device_id: str,
    _user: TokenPayload = Depends(require_permission("devices", "view")),
):
    """Return basic customer details for a device's assigned customer_id."""
    store = get_firestore()
    device_snapshot = store.collection("devices").document(device_id).get()
    if not device_snapshot.exists:
        raise HTTPException(status_code=404, detail="Device not found")
    customer_id = (device_snapshot.to_dict() or {}).get("customer_id")
    # No assignment (or dangling id) is reported as customer: None, not an error.
    if not customer_id:
        return {"customer": None}
    customer_snapshot = store.collection(CRM_COLLECTION).document(customer_id).get()
    if not customer_snapshot.exists:
        return {"customer": None}
    record = customer_snapshot.to_dict() or {}
    return {
        "customer": {
            "id": customer_snapshot.id,
            "name": record.get("name") or "",
            "email": record.get("email") or "",
            "organization": record.get("organization") or "",
            "phone": record.get("phone") or "",
        }
    }
@router.delete("/{device_id}/user-list/{user_id}", status_code=200)
async def remove_user_from_device(
    device_id: str,
    user_id: str,
    _user: TokenPayload = Depends(require_permission("devices", "edit")),
):
    """Remove a user reference from the device's user_list field.

    Handles both storage shapes found in user_list:
    - string paths like "users/<id>" (legacy), and
    - DocumentReference objects (what add_user_to_device appends).

    The original implementation filtered only string entries, so references
    appended by add_user_to_device could never be removed.
    """
    # Inline import mirrors add_user_to_device's duplicate check.
    from google.cloud.firestore_v1 import DocumentReference as DocRef

    db = get_firestore()
    device_ref = db.collection("devices").document(device_id)
    device_doc = device_ref.get()
    if not device_doc.exists:
        raise HTTPException(status_code=404, detail="Device not found")

    data = device_doc.to_dict() or {}
    user_list = data.get("user_list", []) or []

    def _entry_id(entry) -> str:
        """Resolve an entry (path string or DocumentReference) to a bare user id."""
        if isinstance(entry, DocRef):
            return entry.id
        if isinstance(entry, str):
            return entry.split("/")[-1]
        return ""  # unknown shape — keep it rather than guessing

    new_list = [entry for entry in user_list if _entry_id(entry) != user_id]

    # Only write when an entry was actually dropped.
    if len(new_list) != len(user_list):
        device_ref.update({"user_list": new_list})

    return {"status": "removed", "user_id": user_id}
doc_ref.update({"mfg_status": "claimed"}) + data = dict(data) + data["mfg_status"] = "claimed" + return data + + def _doc_to_device(doc) -> DeviceInDB: - """Convert a Firestore document snapshot to a DeviceInDB model.""" - data = _sanitize_dict(doc.to_dict()) + """Convert a Firestore document snapshot to a DeviceInDB model. + + Also auto-upgrades mfg_status to 'claimed' if user_list is non-empty. + """ + raw = doc.to_dict() + raw = _auto_upgrade_claimed(doc.reference, raw) + data = _sanitize_dict(raw) return DeviceInDB(id=doc.id, **data) +FLEET_STATUSES = {"sold", "claimed"} + + def list_devices( search: str | None = None, online_only: bool | None = None, subscription_tier: str | None = None, ) -> list[DeviceInDB]: - """List devices with optional filters.""" + """List fleet devices (sold + claimed only) with optional filters.""" db = get_db() ref = db.collection(COLLECTION) query = ref @@ -118,6 +141,14 @@ def list_devices( results = [] for doc in docs: + raw = doc.to_dict() or {} + + # Only include sold/claimed devices in the fleet view. + # Legacy devices without mfg_status are included to avoid breaking old data. 
+ mfg_status = raw.get("mfg_status") + if mfg_status and mfg_status not in FLEET_STATUSES: + continue + device = _doc_to_device(doc) # Client-side filters @@ -128,7 +159,7 @@ def list_devices( search_lower = search.lower() name_match = search_lower in (device.device_name or "").lower() location_match = search_lower in (device.device_location or "").lower() - sn_match = search_lower in (device.device_id or "").lower() + sn_match = search_lower in (device.serial_number or "").lower() if not (name_match or location_match or sn_match): continue diff --git a/backend/equipment/service.py b/backend/equipment/service.py index eb09ffa..eee4608 100644 --- a/backend/equipment/service.py +++ b/backend/equipment/service.py @@ -4,7 +4,7 @@ from shared.firebase import get_db from shared.exceptions import NotFoundError from equipment.models import NoteCreate, NoteUpdate, NoteInDB -COLLECTION = "equipment_notes" +COLLECTION = "notes" VALID_CATEGORIES = {"general", "maintenance", "installation", "issue", "action_item", "other"} diff --git a/backend/firmware/models.py b/backend/firmware/models.py index 17a41d6..80cf6b1 100644 --- a/backend/firmware/models.py +++ b/backend/firmware/models.py @@ -11,7 +11,7 @@ class UpdateType(str, Enum): class FirmwareVersion(BaseModel): id: str - hw_type: str # e.g. "vesper", "vesper_plus", "vesper_pro" + hw_type: str # e.g. "vesper", "vesper_plus", "vesper_pro", "bespoke" channel: str # "stable", "beta", "alpha", "testing" version: str # semver e.g. 
"1.5" filename: str @@ -20,8 +20,10 @@ class FirmwareVersion(BaseModel): update_type: UpdateType = UpdateType.mandatory min_fw_version: Optional[str] = None # minimum fw version required to install this uploaded_at: str - notes: Optional[str] = None + changelog: Optional[str] = None + release_note: Optional[str] = None is_latest: bool = False + bespoke_uid: Optional[str] = None # only set when hw_type == "bespoke" class FirmwareListResponse(BaseModel): @@ -57,7 +59,7 @@ class FirmwareMetadataResponse(BaseModel): min_fw_version: Optional[str] = None download_url: str uploaded_at: str - notes: Optional[str] = None + release_note: Optional[str] = None # Keep backwards-compatible alias diff --git a/backend/firmware/router.py b/backend/firmware/router.py index 9cde742..6ba2d50 100644 --- a/backend/firmware/router.py +++ b/backend/firmware/router.py @@ -1,5 +1,5 @@ -from fastapi import APIRouter, Depends, Query, UploadFile, File, Form -from fastapi.responses import FileResponse +from fastapi import APIRouter, Depends, Query, UploadFile, File, Form, HTTPException +from fastapi.responses import FileResponse, PlainTextResponse from pydantic import BaseModel from typing import Optional import logging @@ -22,7 +22,9 @@ async def upload_firmware( version: str = Form(...), update_type: UpdateType = Form(UpdateType.mandatory), min_fw_version: Optional[str] = Form(None), - notes: Optional[str] = Form(None), + changelog: Optional[str] = Form(None), + release_note: Optional[str] = Form(None), + bespoke_uid: Optional[str] = Form(None), file: UploadFile = File(...), _user: TokenPayload = Depends(require_permission("manufacturing", "add")), ): @@ -34,7 +36,9 @@ async def upload_firmware( file_bytes=file_bytes, update_type=update_type, min_fw_version=min_fw_version, - notes=notes, + changelog=changelog, + release_note=release_note, + bespoke_uid=bespoke_uid, ) @@ -61,6 +65,18 @@ def get_latest_firmware( return service.get_latest(hw_type, channel, hw_version=hw_version, 
current_version=current_version) +@router.get("/{hw_type}/{channel}/latest/changelog", response_class=PlainTextResponse) +def get_latest_changelog(hw_type: str, channel: str): + """Returns the full changelog for the latest firmware. Plain text.""" + return service.get_latest_changelog(hw_type, channel) + + +@router.get("/{hw_type}/{channel}/{version}/info/changelog", response_class=PlainTextResponse) +def get_version_changelog(hw_type: str, channel: str, version: str): + """Returns the full changelog for a specific firmware version. Plain text.""" + return service.get_version_changelog(hw_type, channel, version) + + @router.get("/{hw_type}/{channel}/{version}/info", response_model=FirmwareMetadataResponse) def get_firmware_info(hw_type: str, channel: str, version: str): """Returns metadata for a specific firmware version. @@ -80,6 +96,33 @@ def download_firmware(hw_type: str, channel: str, version: str): ) +@router.put("/{firmware_id}", response_model=FirmwareVersion) +async def edit_firmware( + firmware_id: str, + channel: Optional[str] = Form(None), + version: Optional[str] = Form(None), + update_type: Optional[UpdateType] = Form(None), + min_fw_version: Optional[str] = Form(None), + changelog: Optional[str] = Form(None), + release_note: Optional[str] = Form(None), + bespoke_uid: Optional[str] = Form(None), + file: Optional[UploadFile] = File(None), + _user: TokenPayload = Depends(require_permission("manufacturing", "add")), +): + file_bytes = await file.read() if file and file.filename else None + return service.edit_firmware( + doc_id=firmware_id, + channel=channel, + version=version, + update_type=update_type, + min_fw_version=min_fw_version, + changelog=changelog, + release_note=release_note, + bespoke_uid=bespoke_uid, + file_bytes=file_bytes, + ) + + @router.delete("/{firmware_id}", status_code=204) def delete_firmware( firmware_id: str, diff --git a/backend/firmware/service.py b/backend/firmware/service.py index aff62c9..175ca19 100644 --- 
a/backend/firmware/service.py +++ b/backend/firmware/service.py @@ -16,7 +16,7 @@ logger = logging.getLogger(__name__) COLLECTION = "firmware_versions" -VALID_HW_TYPES = {"vesper", "vesper_plus", "vesper_pro", "chronos", "chronos_pro", "agnus", "agnus_mini"} +VALID_HW_TYPES = {"vesper", "vesper_plus", "vesper_pro", "chronos", "chronos_pro", "agnus", "agnus_mini", "bespoke"} VALID_CHANNELS = {"stable", "beta", "alpha", "testing"} @@ -43,8 +43,10 @@ def _doc_to_firmware_version(doc) -> FirmwareVersion: update_type=data.get("update_type", UpdateType.mandatory), min_fw_version=data.get("min_fw_version"), uploaded_at=uploaded_str, - notes=data.get("notes"), + changelog=data.get("changelog"), + release_note=data.get("release_note"), is_latest=data.get("is_latest", False), + bespoke_uid=data.get("bespoke_uid"), ) @@ -65,7 +67,7 @@ def _fw_to_metadata_response(fw: FirmwareVersion) -> FirmwareMetadataResponse: min_fw_version=fw.min_fw_version, download_url=download_url, uploaded_at=fw.uploaded_at, - notes=fw.notes, + release_note=fw.release_note, ) @@ -76,33 +78,59 @@ def upload_firmware( file_bytes: bytes, update_type: UpdateType = UpdateType.mandatory, min_fw_version: str | None = None, - notes: str | None = None, + changelog: str | None = None, + release_note: str | None = None, + bespoke_uid: str | None = None, ) -> FirmwareVersion: if hw_type not in VALID_HW_TYPES: raise HTTPException(status_code=400, detail=f"Invalid hw_type. Must be one of: {', '.join(sorted(VALID_HW_TYPES))}") if channel not in VALID_CHANNELS: raise HTTPException(status_code=400, detail=f"Invalid channel. 
Must be one of: {', '.join(sorted(VALID_CHANNELS))}") + if hw_type == "bespoke" and not bespoke_uid: + raise HTTPException(status_code=400, detail="bespoke_uid is required when hw_type is 'bespoke'") + + db = get_db() + sha256 = hashlib.sha256(file_bytes).hexdigest() + now = datetime.now(timezone.utc) + + # For bespoke firmware: if a firmware with the same bespoke_uid already exists, + # overwrite it (delete old doc + file, reuse same storage path keyed by uid). + if hw_type == "bespoke" and bespoke_uid: + existing_docs = list( + db.collection(COLLECTION) + .where("hw_type", "==", "bespoke") + .where("bespoke_uid", "==", bespoke_uid) + .stream() + ) + for old_doc in existing_docs: + old_data = old_doc.to_dict() or {} + old_path = _storage_path("bespoke", old_data.get("channel", channel), old_data.get("version", version)) + if old_path.exists(): + old_path.unlink() + try: + old_path.parent.rmdir() + except OSError: + pass + old_doc.reference.delete() dest = _storage_path(hw_type, channel, version) dest.parent.mkdir(parents=True, exist_ok=True) dest.write_bytes(file_bytes) - sha256 = hashlib.sha256(file_bytes).hexdigest() - now = datetime.now(timezone.utc) doc_id = str(uuid.uuid4()) - db = get_db() - # Mark previous latest for this hw_type+channel as no longer latest - prev_docs = ( - db.collection(COLLECTION) - .where("hw_type", "==", hw_type) - .where("channel", "==", channel) - .where("is_latest", "==", True) - .stream() - ) - for prev in prev_docs: - prev.reference.update({"is_latest": False}) + # (skip for bespoke — each bespoke_uid is its own independent firmware) + if hw_type != "bespoke": + prev_docs = ( + db.collection(COLLECTION) + .where("hw_type", "==", hw_type) + .where("channel", "==", channel) + .where("is_latest", "==", True) + .stream() + ) + for prev in prev_docs: + prev.reference.update({"is_latest": False}) doc_ref = db.collection(COLLECTION).document(doc_id) doc_ref.set({ @@ -115,8 +143,10 @@ def upload_firmware( "update_type": update_type.value, 
"min_fw_version": min_fw_version, "uploaded_at": now, - "notes": notes, + "changelog": changelog, + "release_note": release_note, "is_latest": True, + "bespoke_uid": bespoke_uid, }) return _doc_to_firmware_version(doc_ref.get()) @@ -142,6 +172,8 @@ def list_firmware( def get_latest(hw_type: str, channel: str, hw_version: str | None = None, current_version: str | None = None) -> FirmwareMetadataResponse: if hw_type not in VALID_HW_TYPES: raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'") + if hw_type == "bespoke": + raise HTTPException(status_code=400, detail="Bespoke firmware is not served via auto-update. Use the direct download URL.") if channel not in VALID_CHANNELS: raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'") @@ -182,6 +214,52 @@ def get_version_info(hw_type: str, channel: str, version: str) -> FirmwareMetada return _fw_to_metadata_response(_doc_to_firmware_version(docs[0])) +def get_latest_changelog(hw_type: str, channel: str) -> str: + if hw_type not in VALID_HW_TYPES: + raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'") + if channel not in VALID_CHANNELS: + raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'") + + db = get_db() + docs = list( + db.collection(COLLECTION) + .where("hw_type", "==", hw_type) + .where("channel", "==", channel) + .where("is_latest", "==", True) + .limit(1) + .stream() + ) + if not docs: + raise NotFoundError("Firmware") + fw = _doc_to_firmware_version(docs[0]) + if not fw.changelog: + raise NotFoundError("Changelog") + return fw.changelog + + +def get_version_changelog(hw_type: str, channel: str, version: str) -> str: + if hw_type not in VALID_HW_TYPES: + raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'") + if channel not in VALID_CHANNELS: + raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'") + + db = get_db() + docs = list( + db.collection(COLLECTION) + .where("hw_type", "==", 
hw_type) + .where("channel", "==", channel) + .where("version", "==", version) + .limit(1) + .stream() + ) + if not docs: + raise NotFoundError("Firmware version") + fw = _doc_to_firmware_version(docs[0]) + if not fw.changelog: + raise NotFoundError("Changelog") + return fw.changelog + + def get_firmware_path(hw_type: str, channel: str, version: str) -> Path: path = _storage_path(hw_type, channel, version) if not path.exists(): @@ -205,6 +283,82 @@ def record_ota_event(event_type: str, payload: dict[str, Any]) -> None: logger.warning("Failed to persist OTA event (%s): %s", event_type, exc) +def edit_firmware( + doc_id: str, + channel: str | None = None, + version: str | None = None, + update_type: UpdateType | None = None, + min_fw_version: str | None = None, + changelog: str | None = None, + release_note: str | None = None, + bespoke_uid: str | None = None, + file_bytes: bytes | None = None, +) -> FirmwareVersion: + db = get_db() + doc_ref = db.collection(COLLECTION).document(doc_id) + doc = doc_ref.get() + if not doc.exists: + raise NotFoundError("Firmware") + + data = doc.to_dict() or {} + hw_type = data["hw_type"] + old_channel = data.get("channel", "") + old_version = data.get("version", "") + + effective_channel = channel if channel is not None else old_channel + effective_version = version if version is not None else old_version + + if channel is not None and channel not in VALID_CHANNELS: + raise HTTPException(status_code=400, detail=f"Invalid channel. 
Must be one of: {', '.join(sorted(VALID_CHANNELS))}") + + updates: dict = {} + if channel is not None: + updates["channel"] = channel + if version is not None: + updates["version"] = version + if update_type is not None: + updates["update_type"] = update_type.value + if min_fw_version is not None: + updates["min_fw_version"] = min_fw_version if min_fw_version else None + if changelog is not None: + updates["changelog"] = changelog if changelog else None + if release_note is not None: + updates["release_note"] = release_note if release_note else None + if bespoke_uid is not None: + updates["bespoke_uid"] = bespoke_uid if bespoke_uid else None + + if file_bytes is not None: + # Move binary if path changed + old_path = _storage_path(hw_type, old_channel, old_version) + new_path = _storage_path(hw_type, effective_channel, effective_version) + if old_path != new_path and old_path.exists(): + old_path.unlink() + try: + old_path.parent.rmdir() + except OSError: + pass + new_path.parent.mkdir(parents=True, exist_ok=True) + new_path.write_bytes(file_bytes) + updates["sha256"] = hashlib.sha256(file_bytes).hexdigest() + updates["size_bytes"] = len(file_bytes) + elif (channel is not None and channel != old_channel) or (version is not None and version != old_version): + # Path changed but no new file — move existing binary + old_path = _storage_path(hw_type, old_channel, old_version) + new_path = _storage_path(hw_type, effective_channel, effective_version) + if old_path.exists() and old_path != new_path: + new_path.parent.mkdir(parents=True, exist_ok=True) + old_path.rename(new_path) + try: + old_path.parent.rmdir() + except OSError: + pass + + if updates: + doc_ref.update(updates) + + return _doc_to_firmware_version(doc_ref.get()) + + def delete_firmware(doc_id: str) -> None: db = get_db() doc_ref = db.collection(COLLECTION).document(doc_id) diff --git a/backend/main.py b/backend/main.py index 764a7a3..b62f64a 100644 --- a/backend/main.py +++ b/backend/main.py @@ -24,6 +24,7 
@@ from crm.comms_router import router as crm_comms_router from crm.media_router import router as crm_media_router from crm.nextcloud_router import router as crm_nextcloud_router from crm.quotations_router import router as crm_quotations_router +from public.router import router as public_router from crm.nextcloud import close_client as close_nextcloud_client, keepalive_ping as nextcloud_keepalive from crm.mail_accounts import get_mail_accounts from mqtt.client import mqtt_manager @@ -67,6 +68,7 @@ app.include_router(crm_comms_router) app.include_router(crm_media_router) app.include_router(crm_nextcloud_router) app.include_router(crm_quotations_router) +app.include_router(public_router) async def nextcloud_keepalive_loop(): diff --git a/backend/manufacturing/models.py b/backend/manufacturing/models.py index 491876e..f9ed93c 100644 --- a/backend/manufacturing/models.py +++ b/backend/manufacturing/models.py @@ -55,6 +55,13 @@ class MfgStatus(str, Enum): decommissioned = "decommissioned" +class LifecycleEntry(BaseModel): + status_id: str + date: str # ISO 8601 UTC string + note: Optional[str] = None + set_by: Optional[str] = None + + class BatchCreate(BaseModel): board_type: BoardType board_version: str = Field( @@ -84,6 +91,9 @@ class DeviceInventoryItem(BaseModel): owner: Optional[str] = None assigned_to: Optional[str] = None device_name: Optional[str] = None + lifecycle_history: Optional[List["LifecycleEntry"]] = None + customer_id: Optional[str] = None + user_list: Optional[List[str]] = None class DeviceInventoryListResponse(BaseModel): @@ -94,11 +104,19 @@ class DeviceInventoryListResponse(BaseModel): class DeviceStatusUpdate(BaseModel): status: MfgStatus note: Optional[str] = None + force_claimed: bool = False class DeviceAssign(BaseModel): - customer_email: str - customer_name: Optional[str] = None + customer_id: str + + +class CustomerSearchResult(BaseModel): + id: str + name: str = "" + email: str = "" + organization: str = "" + phone: str = "" class 
RecentActivityItem(BaseModel): diff --git a/backend/manufacturing/router.py b/backend/manufacturing/router.py index f5ee1c1..caa69ae 100644 --- a/backend/manufacturing/router.py +++ b/backend/manufacturing/router.py @@ -1,7 +1,8 @@ -from fastapi import APIRouter, Depends, Query, HTTPException, UploadFile, File, Form +from fastapi import APIRouter, Depends, Query, HTTPException, UploadFile, File from fastapi.responses import Response from fastapi.responses import RedirectResponse from typing import Optional +from pydantic import BaseModel from auth.models import TokenPayload from auth.dependencies import require_permission @@ -14,9 +15,23 @@ from manufacturing.models import ( from manufacturing import service from manufacturing import audit from shared.exceptions import NotFoundError +from shared.firebase import get_db as get_firestore + + +class LifecycleEntryPatch(BaseModel): + index: int + date: Optional[str] = None + note: Optional[str] = None + +class LifecycleEntryCreate(BaseModel): + status_id: str + date: Optional[str] = None + note: Optional[str] = None VALID_FLASH_ASSETS = {"bootloader.bin", "partitions.bin"} VALID_HW_TYPES_MFG = {"vesper", "vesper_plus", "vesper_pro", "agnus", "agnus_mini", "chronos", "chronos_pro"} +# Bespoke UIDs are dynamic — any non-empty slug (max 128 chars) is accepted as +# an hw_type; the flash-asset upload endpoint enforces only that check below. 
router = APIRouter(prefix="/api/manufacturing", tags=["manufacturing"]) @@ -83,13 +98,75 @@ def get_device( return service.get_device_by_sn(sn) +@router.get("/customers/search") +def search_customers( + q: str = Query(""), + _user: TokenPayload = Depends(require_permission("manufacturing", "view")), +): + """Search CRM customers by name, email, phone, organization, or tags.""" + results = service.search_customers(q) + return {"results": results} + + +@router.get("/customers/{customer_id}") +def get_customer( + customer_id: str, + _user: TokenPayload = Depends(require_permission("manufacturing", "view")), +): + """Get a single CRM customer by ID.""" + db = get_firestore() + doc = db.collection("crm_customers").document(customer_id).get() + if not doc.exists: + raise HTTPException(status_code=404, detail="Customer not found") + data = doc.to_dict() or {} + loc = data.get("location") or {} + city = loc.get("city") if isinstance(loc, dict) else None + return { + "id": doc.id, + "name": data.get("name") or "", + "surname": data.get("surname") or "", + "email": data.get("email") or "", + "organization": data.get("organization") or "", + "phone": data.get("phone") or "", + "city": city or "", + } + + @router.patch("/devices/{sn}/status", response_model=DeviceInventoryItem) async def update_status( sn: str, body: DeviceStatusUpdate, user: TokenPayload = Depends(require_permission("manufacturing", "edit")), ): - result = service.update_device_status(sn, body) + # Guard: claimed requires at least one user in user_list + # (allow if explicitly force_claimed=true, which the mfg UI sets after adding a user manually) + if body.status.value == "claimed": + db = get_firestore() + docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream()) + if docs: + data = docs[0].to_dict() or {} + user_list = data.get("user_list", []) or [] + if not user_list and not getattr(body, "force_claimed", False): + raise HTTPException( + status_code=400, + detail="Cannot 
set status to 'claimed': device has no users in user_list. " + "Assign a user first, then set to Claimed.", + ) + + # Guard: sold requires a customer assigned + if body.status.value == "sold": + db = get_firestore() + docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream()) + if docs: + data = docs[0].to_dict() or {} + if not data.get("customer_id"): + raise HTTPException( + status_code=400, + detail="Cannot set status to 'sold' without an assigned customer. " + "Use the 'Assign to Customer' action first.", + ) + + result = service.update_device_status(sn, body, set_by=user.email) await audit.log_action( admin_user=user.email, action="status_updated", @@ -99,12 +176,91 @@ async def update_status( return result +@router.patch("/devices/{sn}/lifecycle", response_model=DeviceInventoryItem) +async def patch_lifecycle_entry( + sn: str, + body: LifecycleEntryPatch, + user: TokenPayload = Depends(require_permission("manufacturing", "edit")), +): + """Edit the date and/or note of a lifecycle history entry by index.""" + db = get_firestore() + docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream()) + if not docs: + raise HTTPException(status_code=404, detail="Device not found") + doc_ref = docs[0].reference + data = docs[0].to_dict() or {} + history = data.get("lifecycle_history") or [] + if body.index < 0 or body.index >= len(history): + raise HTTPException(status_code=400, detail="Invalid lifecycle entry index") + if body.date is not None: + history[body.index]["date"] = body.date + if body.note is not None: + history[body.index]["note"] = body.note + doc_ref.update({"lifecycle_history": history}) + from manufacturing.service import _doc_to_inventory_item + return _doc_to_inventory_item(doc_ref.get()) + + +@router.post("/devices/{sn}/lifecycle", response_model=DeviceInventoryItem, status_code=201) +async def create_lifecycle_entry( + sn: str, + body: LifecycleEntryCreate, + user: TokenPayload = 
Depends(require_permission("manufacturing", "edit")), +): + """Create a lifecycle history entry for a step that has no entry yet (on-the-fly).""" + from datetime import datetime, timezone + db = get_firestore() + docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream()) + if not docs: + raise HTTPException(status_code=404, detail="Device not found") + doc_ref = docs[0].reference + data = docs[0].to_dict() or {} + history = data.get("lifecycle_history") or [] + new_entry = { + "status_id": body.status_id, + "date": body.date or datetime.now(timezone.utc).isoformat(), + "note": body.note, + "set_by": user.email, + } + history.append(new_entry) + doc_ref.update({"lifecycle_history": history}) + from manufacturing.service import _doc_to_inventory_item + return _doc_to_inventory_item(doc_ref.get()) + + +@router.delete("/devices/{sn}/lifecycle/{index}", response_model=DeviceInventoryItem) +async def delete_lifecycle_entry( + sn: str, + index: int, + user: TokenPayload = Depends(require_permission("manufacturing", "edit")), +): + """Delete a lifecycle history entry by index. Cannot delete the entry for the current status.""" + db = get_firestore() + docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream()) + if not docs: + raise HTTPException(status_code=404, detail="Device not found") + doc_ref = docs[0].reference + data = docs[0].to_dict() or {} + history = data.get("lifecycle_history") or [] + if index < 0 or index >= len(history): + raise HTTPException(status_code=400, detail="Invalid lifecycle entry index") + current_status = data.get("mfg_status", "") + if history[index].get("status_id") == current_status: + raise HTTPException(status_code=400, detail="Cannot delete the entry for the current status. 
Change the status first.") + history.pop(index) + doc_ref.update({"lifecycle_history": history}) + from manufacturing.service import _doc_to_inventory_item + return _doc_to_inventory_item(doc_ref.get()) + + @router.get("/devices/{sn}/nvs.bin") async def download_nvs( sn: str, + hw_type_override: Optional[str] = Query(None, description="Override hw_type written to NVS (for bespoke firmware)"), + hw_revision_override: Optional[str] = Query(None, description="Override hw_revision written to NVS (for bespoke firmware)"), user: TokenPayload = Depends(require_permission("manufacturing", "view")), ): - binary = service.get_nvs_binary(sn) + binary = service.get_nvs_binary(sn, hw_type_override=hw_type_override, hw_revision_override=hw_revision_override) await audit.log_action( admin_user=user.email, action="device_flashed", @@ -123,12 +279,15 @@ async def assign_device( body: DeviceAssign, user: TokenPayload = Depends(require_permission("manufacturing", "edit")), ): - result = service.assign_device(sn, body) + try: + result = service.assign_device(sn, body) + except NotFoundError as e: + raise HTTPException(status_code=404, detail=str(e)) await audit.log_action( admin_user=user.email, action="device_assigned", serial_number=sn, - detail={"customer_email": body.customer_email, "customer_name": body.customer_name}, + detail={"customer_id": body.customer_id}, ) return result @@ -201,8 +360,9 @@ async def upload_flash_asset( and .pio/build/{env}/partitions.bin). Upload them once per hw_type after each PlatformIO build that changes the partition layout. """ - if hw_type not in VALID_HW_TYPES_MFG: - raise HTTPException(status_code=400, detail=f"Invalid hw_type. 
Must be one of: {', '.join(sorted(VALID_HW_TYPES_MFG))}") + # hw_type can be a standard board type OR a bespoke UID (any non-empty slug) + if not hw_type or len(hw_type) > 128: + raise HTTPException(status_code=400, detail="Invalid hw_type/bespoke UID.") if asset not in VALID_FLASH_ASSETS: raise HTTPException(status_code=400, detail=f"Invalid asset. Must be one of: {', '.join(sorted(VALID_FLASH_ASSETS))}") data = await file.read() @@ -212,34 +372,38 @@ async def upload_flash_asset( @router.get("/devices/{sn}/bootloader.bin") def download_bootloader( sn: str, + hw_type_override: Optional[str] = Query(None, description="Override hw_type for flash asset lookup (for bespoke firmware)"), _user: TokenPayload = Depends(require_permission("manufacturing", "view")), ): """Return the bootloader.bin for this device's hw_type (flashed at 0x1000).""" item = service.get_device_by_sn(sn) + hw_type = hw_type_override or item.hw_type try: - data = service.get_flash_asset(item.hw_type, "bootloader.bin") + data = service.get_flash_asset(hw_type, "bootloader.bin") except NotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) return Response( content=data, media_type="application/octet-stream", - headers={"Content-Disposition": f'attachment; filename="bootloader_{item.hw_type}.bin"'}, + headers={"Content-Disposition": f'attachment; filename="bootloader_{hw_type}.bin"'}, ) @router.get("/devices/{sn}/partitions.bin") def download_partitions( sn: str, + hw_type_override: Optional[str] = Query(None, description="Override hw_type for flash asset lookup (for bespoke firmware)"), _user: TokenPayload = Depends(require_permission("manufacturing", "view")), ): """Return the partitions.bin for this device's hw_type (flashed at 0x8000).""" item = service.get_device_by_sn(sn) + hw_type = hw_type_override or item.hw_type try: - data = service.get_flash_asset(item.hw_type, "partitions.bin") + data = service.get_flash_asset(hw_type, "partitions.bin") except NotFoundError as e: raise 
HTTPException(status_code=404, detail=str(e)) return Response( content=data, media_type="application/octet-stream", - headers={"Content-Disposition": f'attachment; filename="partitions_{item.hw_type}.bin"'}, + headers={"Content-Disposition": f'attachment; filename="partitions_{hw_type}.bin"'}, ) diff --git a/backend/manufacturing/service.py b/backend/manufacturing/service.py index a3b1a82..93437c1 100644 --- a/backend/manufacturing/service.py +++ b/backend/manufacturing/service.py @@ -33,6 +33,18 @@ def _get_existing_sns(db) -> set: return existing +def _resolve_user_list(raw_list: list) -> list[str]: + """Convert user_list entries (DocumentReferences or path strings) to plain user ID strings.""" + from google.cloud.firestore_v1 import DocumentReference + result = [] + for entry in raw_list: + if isinstance(entry, DocumentReference): + result.append(entry.id) + elif isinstance(entry, str): + result.append(entry.split("/")[-1]) + return result + + def _doc_to_inventory_item(doc) -> DeviceInventoryItem: data = doc.to_dict() or {} created_raw = data.get("created_at") @@ -52,6 +64,9 @@ def _doc_to_inventory_item(doc) -> DeviceInventoryItem: owner=data.get("owner"), assigned_to=data.get("assigned_to"), device_name=data.get("device_name") or None, + lifecycle_history=data.get("lifecycle_history") or [], + customer_id=data.get("customer_id"), + user_list=_resolve_user_list(data.get("user_list") or []), ) @@ -80,11 +95,19 @@ def create_batch(data: BatchCreate) -> BatchResponse: "created_at": now, "owner": None, "assigned_to": None, - "users_list": [], + "user_list": [], # Legacy fields left empty so existing device views don't break "device_name": "", "device_location": "", "is_Online": False, + "lifecycle_history": [ + { + "status_id": "manufactured", + "date": now.isoformat(), + "note": None, + "set_by": None, + } + ], }) serial_numbers.append(sn) @@ -135,14 +158,31 @@ def get_device_by_sn(sn: str) -> DeviceInventoryItem: return _doc_to_inventory_item(docs[0]) -def 
update_device_status(sn: str, data: DeviceStatusUpdate) -> DeviceInventoryItem: +def update_device_status(sn: str, data: DeviceStatusUpdate, set_by: str | None = None) -> DeviceInventoryItem: db = get_db() docs = list(db.collection(COLLECTION).where("serial_number", "==", sn).limit(1).stream()) if not docs: raise NotFoundError("Device") doc_ref = docs[0].reference - update = {"mfg_status": data.status.value} + doc_data = docs[0].to_dict() or {} + now = datetime.now(timezone.utc).isoformat() + + history = doc_data.get("lifecycle_history") or [] + + # Append new lifecycle entry + new_entry = { + "status_id": data.status.value, + "date": now, + "note": data.note if data.note else None, + "set_by": set_by, + } + history.append(new_entry) + + update = { + "mfg_status": data.status.value, + "lifecycle_history": history, + } if data.note: update["mfg_status_note"] = data.note doc_ref.update(update) @@ -150,47 +190,114 @@ def update_device_status(sn: str, data: DeviceStatusUpdate) -> DeviceInventoryIt return _doc_to_inventory_item(doc_ref.get()) -def get_nvs_binary(sn: str) -> bytes: +def get_nvs_binary(sn: str, hw_type_override: str | None = None, hw_revision_override: str | None = None) -> bytes: item = get_device_by_sn(sn) return generate_nvs_binary( serial_number=item.serial_number, - hw_type=item.hw_type, - hw_version=item.hw_version, + hw_family=hw_type_override if hw_type_override else item.hw_type, + hw_revision=hw_revision_override if hw_revision_override else item.hw_version, ) def assign_device(sn: str, data: DeviceAssign) -> DeviceInventoryItem: - from utils.email import send_device_assignment_invite + """Assign a device to a customer by customer_id. + - Stores customer_id on the device doc. + - Adds the device to the customer's owned_items list. + - Sets mfg_status to 'sold' unless device is already 'claimed'. 
+ """ db = get_db() + CRM_COLLECTION = "crm_customers" + + # Get device doc docs = list(db.collection(COLLECTION).where("serial_number", "==", sn).limit(1).stream()) if not docs: raise NotFoundError("Device") doc_data = docs[0].to_dict() or {} doc_ref = docs[0].reference - doc_ref.update({ - "owner": data.customer_email, - "assigned_to": data.customer_email, - "mfg_status": "sold", + current_status = doc_data.get("mfg_status", "manufactured") + + # Get customer doc + customer_ref = db.collection(CRM_COLLECTION).document(data.customer_id) + customer_doc = customer_ref.get() + if not customer_doc.exists: + raise NotFoundError("Customer") + customer_data = customer_doc.to_dict() or {} + + # Determine new status: don't downgrade claimed → sold + new_status = current_status if current_status == "claimed" else "sold" + + now = datetime.now(timezone.utc).isoformat() + history = doc_data.get("lifecycle_history") or [] + history.append({ + "status_id": new_status, + "date": now, + "note": "Assigned to customer", + "set_by": None, }) - hw_type = doc_data.get("hw_type", "") - device_name = BOARD_TYPE_LABELS.get(hw_type, hw_type or "Device") + doc_ref.update({ + "customer_id": data.customer_id, + "mfg_status": new_status, + "lifecycle_history": history, + }) - try: - send_device_assignment_invite( - customer_email=data.customer_email, - serial_number=sn, - device_name=device_name, - customer_name=data.customer_name, - ) - except Exception as exc: - logger.error("Assignment succeeded but email failed for %s → %s: %s", sn, data.customer_email, exc) + # Add to customer's owned_items (avoid duplicates) + owned_items = customer_data.get("owned_items", []) or [] + device_doc_id = docs[0].id + already_assigned = any( + item.get("type") == "console_device" + and item.get("console_device", {}).get("device_id") == device_doc_id + for item in owned_items + ) + if not already_assigned: + device_name = doc_data.get("device_name") or BOARD_TYPE_LABELS.get(doc_data.get("hw_type", ""), sn) + 
owned_items.append({ + "type": "console_device", + "console_device": { + "device_id": device_doc_id, + "serial_number": sn, + "label": device_name, + }, + }) + customer_ref.update({"owned_items": owned_items}) return _doc_to_inventory_item(doc_ref.get()) +def search_customers(q: str) -> list: + """Search crm_customers by name, email, phone, organization, or tags.""" + db = get_db() + CRM_COLLECTION = "crm_customers" + docs = db.collection(CRM_COLLECTION).stream() + results = [] + q_lower = q.lower().strip() + for doc in docs: + data = doc.to_dict() or {} + loc = data.get("location") or {} + loc = loc if isinstance(loc, dict) else {} + city = loc.get("city") or "" + searchable = " ".join(filter(None, [ + data.get("name"), data.get("surname"), + data.get("email"), data.get("phone"), data.get("organization"), + loc.get("address"), loc.get("city"), loc.get("postal_code"), + loc.get("region"), loc.get("country"), + " ".join(data.get("tags") or []), + ])).lower() + if not q_lower or q_lower in searchable: + results.append({ + "id": doc.id, + "name": data.get("name") or "", + "surname": data.get("surname") or "", + "email": data.get("email") or "", + "organization": data.get("organization") or "", + "phone": data.get("phone") or "", + "city": city or "", + }) + return results + + def get_stats() -> ManufacturingStats: db = get_db() docs = list(db.collection(COLLECTION).stream()) diff --git a/backend/melodies/models.py b/backend/melodies/models.py index c238c8a..6520e26 100644 --- a/backend/melodies/models.py +++ b/backend/melodies/models.py @@ -30,6 +30,7 @@ class MelodyInfo(BaseModel): isTrueRing: bool = False previewURL: str = "" archetype_csv: Optional[str] = None + outdated_archetype: bool = False class MelodyAttributes(BaseModel): diff --git a/backend/melodies/router.py b/backend/melodies/router.py index b029c1b..5b58fa8 100644 --- a/backend/melodies/router.py +++ b/backend/melodies/router.py @@ -146,6 +146,23 @@ async def get_files( return 
service.get_storage_files(melody_id, melody.uid) +@router.patch("/{melody_id}/set-outdated", response_model=MelodyInDB) +async def set_outdated( + melody_id: str, + outdated: bool = Query(...), + _user: TokenPayload = Depends(require_permission("melodies", "edit")), +): + """Manually set or clear the outdated_archetype flag on a melody.""" + melody = await service.get_melody(melody_id) + info = melody.information.model_dump() + info["outdated_archetype"] = outdated + return await service.update_melody( + melody_id, + MelodyUpdate(information=MelodyInfo(**info)), + actor_name=_user.name, + ) + + @router.get("/{melody_id}/download/binary") async def download_binary_file( melody_id: str, diff --git a/backend/public/__init__.py b/backend/public/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/public/router.py b/backend/public/router.py new file mode 100644 index 0000000..55d237e --- /dev/null +++ b/backend/public/router.py @@ -0,0 +1,208 @@ +""" +Public (no-auth) endpoints for CloudFlash and feature gate checks. 
+""" +from fastapi import APIRouter, HTTPException +from fastapi.responses import Response +from pydantic import BaseModel +from typing import List, Optional + +from settings.public_features_service import get_public_features +from firmware.service import list_firmware +from utils.nvs_generator import generate as generate_nvs +from manufacturing.service import get_device_by_sn +from shared.exceptions import NotFoundError + +router = APIRouter(prefix="/api/public", tags=["public"]) + + +# ── Feature gate ────────────────────────────────────────────────────────────── + +class CloudFlashStatus(BaseModel): + enabled: bool + + +@router.get("/cloudflash/status", response_model=CloudFlashStatus) +async def cloudflash_status(): + """Returns whether the CloudFlash public page is currently enabled.""" + settings = get_public_features() + return CloudFlashStatus(enabled=settings.cloudflash_enabled) + + +def _require_cloudflash_enabled(): + """Raises 403 if CloudFlash is disabled.""" + settings = get_public_features() + if not settings.cloudflash_enabled: + raise HTTPException(status_code=403, detail="CloudFlash is currently disabled.") + + +# ── Public firmware list ─────────────────────────────────────────────────────── + +class PublicFirmwareOption(BaseModel): + hw_type: str + hw_type_label: str + channel: str + version: str + download_url: str + + +HW_TYPE_LABELS = { + "vesper": "Vesper", + "vesper_plus": "Vesper Plus", + "vesper_pro": "Vesper Pro", + "agnus": "Agnus", + "agnus_mini": "Agnus Mini", + "chronos": "Chronos", + "chronos_pro": "Chronos Pro", +} + + +@router.get("/cloudflash/firmware", response_model=List[PublicFirmwareOption]) +async def list_public_firmware(): + """ + Returns all available firmware options (is_latest=True, non-bespoke, stable channel only). + No authentication required — used by the public CloudFlash page. 
+ """ + _require_cloudflash_enabled() + + all_fw = list_firmware() + options = [] + for fw in all_fw: + if not fw.is_latest: + continue + if fw.hw_type == "bespoke": + continue + if fw.channel != "stable": + continue + options.append(PublicFirmwareOption( + hw_type=fw.hw_type, + hw_type_label=HW_TYPE_LABELS.get(fw.hw_type, fw.hw_type.replace("_", " ").title()), + channel=fw.channel, + version=fw.version, + download_url=f"/api/firmware/{fw.hw_type}/{fw.channel}/{fw.version}/firmware.bin", + )) + + # Sort by hw_type label + options.sort(key=lambda x: x.hw_type_label) + return options + + +# ── Public serial number validation ────────────────────────────────────────── + +class SerialValidationResult(BaseModel): + valid: bool + hw_type: Optional[str] = None + hw_type_label: Optional[str] = None + hw_version: Optional[str] = None + + +@router.get("/cloudflash/validate-serial/{serial_number}", response_model=SerialValidationResult) +async def validate_serial(serial_number: str): + """ + Check whether a serial number exists in the device database. + Returns hw_type info if found so the frontend can confirm it matches the user's selection. + No sensitive device data is returned. + """ + _require_cloudflash_enabled() + + sn = serial_number.strip().upper() + try: + device = get_device_by_sn(sn) + return SerialValidationResult( + valid=True, + hw_type=device.hw_type, + hw_type_label=HW_TYPE_LABELS.get(device.hw_type, device.hw_type.replace("_", " ").title()), + hw_version=device.hw_version, + ) + except Exception: + return SerialValidationResult(valid=False) + + +# ── Public NVS generation ───────────────────────────────────────────────────── + +class NvsRequest(BaseModel): + serial_number: str + hw_type: str + hw_revision: str + + +@router.post("/cloudflash/nvs.bin") +async def generate_public_nvs(body: NvsRequest): + """ + Generate an NVS binary for a given serial number + hardware info. + No authentication required — used by the public CloudFlash page for Full Wipe flash. 
+ The serial number is provided by the user (they read it from the sticker on their device). + """ + _require_cloudflash_enabled() + + sn = body.serial_number.strip().upper() + if not sn: + raise HTTPException(status_code=422, detail="Serial number is required.") + + hw_type = body.hw_type.strip().lower() + hw_revision = body.hw_revision.strip() + + if not hw_type or not hw_revision: + raise HTTPException(status_code=422, detail="hw_type and hw_revision are required.") + + try: + nvs_bytes = generate_nvs( + serial_number=sn, + hw_family=hw_type, + hw_revision=hw_revision, + ) + except Exception as e: + raise HTTPException(status_code=500, detail=f"NVS generation failed: {str(e)}") + + return Response( + content=nvs_bytes, + media_type="application/octet-stream", + headers={"Content-Disposition": f'attachment; filename="{sn}_nvs.bin"'}, + ) + + +# ── Public flash assets (bootloader + partitions) ───────────────────────────── + +@router.get("/cloudflash/{hw_type}/bootloader.bin") +async def get_public_bootloader(hw_type: str): + """ + Serve the bootloader binary for a given hw_type. + No authentication required — used by the public CloudFlash page. + """ + _require_cloudflash_enabled() + + import os + from config import settings as cfg + from pathlib import Path + + asset_path = Path(cfg.flash_assets_storage_path) / hw_type / "bootloader.bin" + if not asset_path.exists(): + raise HTTPException(status_code=404, detail=f"Bootloader not found for {hw_type}.") + + return Response( + content=asset_path.read_bytes(), + media_type="application/octet-stream", + headers={"Content-Disposition": f'attachment; filename="bootloader_{hw_type}.bin"'}, + ) + + +@router.get("/cloudflash/{hw_type}/partitions.bin") +async def get_public_partitions(hw_type: str): + """ + Serve the partition table binary for a given hw_type. + No authentication required — used by the public CloudFlash page. 
+ """ + _require_cloudflash_enabled() + + import os + from config import settings as cfg + from pathlib import Path + + asset_path = Path(cfg.flash_assets_storage_path) / hw_type / "partitions.bin" + if not asset_path.exists(): + raise HTTPException(status_code=404, detail=f"Partition table not found for {hw_type}.") + + return Response( + content=asset_path.read_bytes(), + media_type="application/octet-stream", + headers={"Content-Disposition": f'attachment; filename="partitions_{hw_type}.bin"'}, + ) diff --git a/backend/settings/public_features_models.py b/backend/settings/public_features_models.py new file mode 100644 index 0000000..7719856 --- /dev/null +++ b/backend/settings/public_features_models.py @@ -0,0 +1,10 @@ +from pydantic import BaseModel +from typing import Optional + + +class PublicFeaturesSettings(BaseModel): + cloudflash_enabled: bool = False + + +class PublicFeaturesSettingsUpdate(BaseModel): + cloudflash_enabled: Optional[bool] = None diff --git a/backend/settings/public_features_service.py b/backend/settings/public_features_service.py new file mode 100644 index 0000000..04c4f7e --- /dev/null +++ b/backend/settings/public_features_service.py @@ -0,0 +1,31 @@ +from shared.firebase import get_db +from settings.public_features_models import PublicFeaturesSettings, PublicFeaturesSettingsUpdate + +COLLECTION = "admin_settings" +DOC_ID = "public_features" + + +def get_public_features() -> PublicFeaturesSettings: + """Get public features settings from Firestore. Creates defaults if not found.""" + db = get_db() + doc = db.collection(COLLECTION).document(DOC_ID).get() + if doc.exists: + return PublicFeaturesSettings(**doc.to_dict()) + defaults = PublicFeaturesSettings() + db.collection(COLLECTION).document(DOC_ID).set(defaults.model_dump()) + return defaults + + +def update_public_features(data: PublicFeaturesSettingsUpdate) -> PublicFeaturesSettings: + """Update public features settings. 
Only provided fields are updated.""" + db = get_db() + doc_ref = db.collection(COLLECTION).document(DOC_ID) + doc = doc_ref.get() + + existing = doc.to_dict() if doc.exists else PublicFeaturesSettings().model_dump() + update_data = data.model_dump(exclude_none=True) + existing.update(update_data) + + normalized = PublicFeaturesSettings(**existing) + doc_ref.set(normalized.model_dump()) + return normalized diff --git a/backend/settings/router.py b/backend/settings/router.py index cfd7163..e68f5b1 100644 --- a/backend/settings/router.py +++ b/backend/settings/router.py @@ -1,8 +1,11 @@ from fastapi import APIRouter, Depends from auth.models import TokenPayload -from auth.dependencies import require_permission +from auth.dependencies import require_permission, require_roles +from auth.models import Role from settings.models import MelodySettings, MelodySettingsUpdate +from settings.public_features_models import PublicFeaturesSettings, PublicFeaturesSettingsUpdate from settings import service +from settings import public_features_service router = APIRouter(prefix="/api/settings", tags=["settings"]) @@ -20,3 +23,20 @@ async def update_melody_settings( _user: TokenPayload = Depends(require_permission("melodies", "edit")), ): return service.update_melody_settings(body) + + +# ── Public Features Settings (sysadmin / admin only) ───────────────────────── + +@router.get("/public-features", response_model=PublicFeaturesSettings) +async def get_public_features( + _user: TokenPayload = Depends(require_roles(Role.sysadmin, Role.admin)), +): + return public_features_service.get_public_features() + + +@router.put("/public-features", response_model=PublicFeaturesSettings) +async def update_public_features( + body: PublicFeaturesSettingsUpdate, + _user: TokenPayload = Depends(require_roles(Role.sysadmin, Role.admin)), +): + return public_features_service.update_public_features(body) diff --git a/backend/utils/nvs_generator.py b/backend/utils/nvs_generator.py index 58541a9..b2b72cb 
100644 --- a/backend/utils/nvs_generator.py +++ b/backend/utils/nvs_generator.py @@ -177,16 +177,16 @@ def _build_page(entries: List[bytes], slot_counts: List[int], seq: int = 0) -> b return page -def generate(serial_number: str, hw_type: str, hw_version: str) -> bytes: +def generate(serial_number: str, hw_family: str, hw_revision: str) -> bytes: """Generate a 0x5000-byte NVS partition binary for a Vesper device. serial_number: full SN string e.g. 'BSVSPR-26C13X-STD01R-X7KQA' - hw_type: board family e.g. 'vesper', 'vesper_plus', 'vesper_pro' - hw_version: zero-padded revision e.g. '01' + hw_family: board family e.g. 'vesper-standard', 'vesper-plus' + hw_revision: hardware revision string e.g. '1.0' - Writes the NEW schema keys (2.0+) expected by ConfigManager: + Writes the schema keys expected by ConfigManager (struct DeviceConfig): serial ← full serial number - hw_family ← board family (hw_type value, lowercase) + hw_family ← board family (lowercase) hw_revision ← hardware revision string Returns raw bytes ready to flash at 0x9000. 
@@ -196,8 +196,8 @@ def generate(serial_number: str, hw_type: str, hw_version: str) -> bytes: # Build entries for namespace "device_id" ns_entry, ns_span = _build_namespace_entry("device_id", ns_index) uid_entry, uid_span = _build_string_entry(ns_index, "serial", serial_number) - hwt_entry, hwt_span = _build_string_entry(ns_index, "hw_family", hw_type.lower()) - hwv_entry, hwv_span = _build_string_entry(ns_index, "hw_revision", hw_version) + hwt_entry, hwt_span = _build_string_entry(ns_index, "hw_family", hw_family.lower()) + hwv_entry, hwv_span = _build_string_entry(ns_index, "hw_revision", hw_revision) entries = [ns_entry, uid_entry, hwt_entry, hwv_entry] spans = [ns_span, uid_span, hwt_span, hwv_span] diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index d5fdb05..95e4ade 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -1,5 +1,7 @@ import { Routes, Route, Navigate } from "react-router-dom"; import { useAuth } from "./auth/AuthContext"; +import CloudFlashPage from "./cloudflash/CloudFlashPage"; +import PublicFeaturesSettings from "./settings/PublicFeaturesSettings"; import LoginPage from "./auth/LoginPage"; import MainLayout from "./layout/MainLayout"; import MelodyList from "./melodies/MelodyList"; @@ -106,6 +108,9 @@ function RoleGate({ roles, children }) { export default function App() { return ( + {/* Public routes — no login required */} + } /> + } /> } /> } /> + {/* Settings - Public Features */} + } /> + } /> diff --git a/frontend/src/assets/logos/cloudflash_large.png b/frontend/src/assets/logos/cloudflash_large.png new file mode 100644 index 0000000..d63918f Binary files /dev/null and b/frontend/src/assets/logos/cloudflash_large.png differ diff --git a/frontend/src/assets/logos/cloudflash_small.png b/frontend/src/assets/logos/cloudflash_small.png new file mode 100644 index 0000000..fbd08a3 Binary files /dev/null and b/frontend/src/assets/logos/cloudflash_small.png differ diff --git a/frontend/src/cloudflash/CloudFlashPage.jsx 
b/frontend/src/cloudflash/CloudFlashPage.jsx new file mode 100644 index 0000000..5ac4cb4 --- /dev/null +++ b/frontend/src/cloudflash/CloudFlashPage.jsx @@ -0,0 +1,1053 @@ +import { useState, useEffect, useRef } from "react"; +import { ESPLoader, Transport } from "esptool-js"; +import cloudflashLogo from "../assets/logos/cloudflash_large.png"; + +// ─── constants ──────────────────────────────────────────────────────────────── + +const STEPS = [ + { id: 1, label: "Welcome" }, + { id: 2, label: "Hardware" }, + { id: 3, label: "Options" }, + { id: 4, label: "Flash" }, + { id: 5, label: "Done" }, +]; + +const FLASH_BAUD = 460800; +const NVS_ADDRESS = 0x9000; +const FW_ADDRESS = 0x10000; +const VERIFY_POLL_MS = 6000; +const VERIFY_TIMEOUT_MS = 120_000; + +const FLASH_TYPE_FULL = "full_wipe"; +const FLASH_TYPE_FW_ONLY = "fw_only"; + +// Fixed card height so the page doesn't jump between steps +const CARD_MIN_HEIGHT = 420; + +// ─── helpers ────────────────────────────────────────────────────────────────── + +function arrayBufferToString(buf) { + const bytes = new Uint8Array(buf); + let str = ""; + for (let i = 0; i < bytes.length; i++) str += String.fromCharCode(bytes[i]); + return str; +} + +async function fetchBinary(url) { + const resp = await fetch(url); + if (!resp.ok) { + const err = await resp.json().catch(() => ({})); + throw new Error(err.detail || `Failed to fetch ${url}: ${resp.status}`); + } + return resp.arrayBuffer(); +} + +async function postForBinary(url, body) { + const resp = await fetch(url, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(body), + }); + if (!resp.ok) { + const err = await resp.json().catch(() => ({})); + throw new Error(err.detail || `Request failed: ${resp.status}`); + } + return resp.arrayBuffer(); +} + +// ─── Shared sub-components ──────────────────────────────────────────────────── + +function StepIndicator({ current }) { + return ( +
+ {STEPS.map((step, i) => { + const done = step.id < current; + const active = step.id === current; + const pending = step.id > current; + const isLast = i === STEPS.length - 1; + + return ( +
+ {/* Circle */} +
+
+ {done ? ( + + + + ) : ( + step.id + )} +
+ + {step.label} + +
+ + {/* Connector line */} + {!isLast && ( +
+ )} +
+ ); + })} +
+ ); +} + +function ProgressBar({ label, percent }) { + return ( +
+
+ {label} + {Math.round(percent)}% +
+
+
+
+
+ ); +} + +function InfoBox({ type = "info", children }) { + const styles = { + info: { bg: "var(--badge-blue-bg)", border: "#1e3a5f", color: "var(--badge-blue-text)" }, + warning: { bg: "#2e1a00", border: "#7c4a00", color: "#fb923c" }, + error: { bg: "var(--danger-bg)", border: "var(--danger)", color: "var(--danger-text)" }, + success: { bg: "var(--success-bg)", border: "var(--success)", color: "var(--success-text)" }, + }; + const s = styles[type] || styles.info; + return ( +
+ {children} +
+ ); +} + +function SectionTitle({ children }) { + return ( +

+ {children} +

+ ); +} + +function SubTitle({ children }) { + return ( +

+ {children} +

+ ); +} + +function PrimaryButton({ onClick, disabled, children, fullWidth = false }) { + return ( + + ); +} + +function SecondaryButton({ onClick, disabled, children }) { + return ( + + ); +} + +// ─── Step 1: Welcome ────────────────────────────────────────────────────────── + +function StepWelcome({ onNext }) { + return ( +
+ +

+ Welcome to BellSystems, CloudFlash ! +

+

+ CloudFlash allows you to restore or update the firmware on your BellSystems device

using only + a USB cable and your web browser — no special software required. +

+ +
+

+ Before you begin, please make sure: +

+ {[ + "You are using Google Chrome or Microsoft Edge on a desktop or laptop computer.", + "Your BellSystems device is connected via USB cable.", + "The device is powered on.", + "You have the serial number from the sticker on the bottom of your device (needed for a full restore only).", + ].map((item, i) => ( +
+ + {item} +
+ ))} +
+ + + I'm ready — let's begin → + +
+ ); +} + +// ─── Step 2: Select Hardware ────────────────────────────────────────────────── + +function StepSelectHardware({ onNext }) { + const [firmwares, setFirmwares] = useState([]); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(""); + const [selectedHwType, setHwType] = useState(""); + + useEffect(() => { + fetch("/api/public/cloudflash/firmware") + .then((r) => { + if (!r.ok) return r.json().then((e) => { throw new Error(e.detail || "Failed to load firmware list."); }); + return r.json(); + }) + .then(setFirmwares) + .catch((e) => setError(e.message)) + .finally(() => setLoading(false)); + }, []); + + const selectedFw = firmwares.find((f) => f.hw_type === selectedHwType); + + return ( +
+ Select Your Device Type + + Choose the model that matches your device.

You can find the model name on the white sticker + on the bottom or back panel of your device, labelled "Model". +
+ + {error &&
{error}
} + + {loading ? ( +
+

Loading available devices…

+
+ ) : firmwares.length === 0 ? ( +
+ + No firmware is currently available. Please contact BellSystems support. + +
+ ) : ( +
+ {firmwares.map((fw) => ( + setHwType(fw.hw_type)} + /> + ))} +
+ )} + +
+ + + +

+ Not sure which model you have? The model name is printed on the label on the back of your device. + If you still can't find it, contact BellSystems support before proceeding. +

+
+ +
+ onNext(selectedFw)} disabled={!selectedFw}> + Continue with {selectedFw ? selectedFw.hw_type_label : "selected device"} → + +
+
+ ); +} + +function HardwareCard({ fw, selected, onClick }) { + return ( + + ); +} + +// ─── Step 3: Flash Type ─────────────────────────────────────────────────────── + +function StepSelectFlashType({ firmware, onNext }) { + const [flashType, setFlashType] = useState(""); + const [serial, setSerial] = useState(""); + const [serialError, setSerialError] = useState(""); + const [serialValid, setSerialValid] = useState(false); + const [validating, setValidating] = useState(false); + + const handleSerialBlur = async () => { + const trimmed = serial.trim().toUpperCase(); + if (!trimmed) return; + setValidating(true); + setSerialError(""); + setSerialValid(false); + try { + const resp = await fetch(`/api/public/cloudflash/validate-serial/${encodeURIComponent(trimmed)}`); + const data = await resp.json(); + if (data.valid) { + setSerialValid(true); + } else { + setSerialError( + "We couldn't find this serial number in our database. " + + "Please double-check the sticker on your device and try again. " + + "If you're sure it's correct, please contact BellSystems support." + ); + } + } catch { + setSerialError("Could not verify serial number. Please check your internet connection and try again."); + } finally { + setValidating(false); + } + }; + + const handleContinue = () => { + if (flashType === FLASH_TYPE_FULL) { + const trimmed = serial.trim().toUpperCase(); + if (!trimmed) { setSerialError("Please enter the serial number from the sticker on your device."); return; } + if (!serialValid) { setSerialError("Please wait for the serial number to be verified, or correct it first."); return; } + onNext({ flashType, serial: trimmed }); + } else { + onNext({ flashType, serial: null }); + } + }; + + const isFullWipe = flashType === FLASH_TYPE_FULL; + const inputBorderColor = serialError ? "var(--danger)" : serialValid ? "var(--accent)" : "var(--border-input)"; + + return ( +
+ Choose Flash Type + + Select how you'd like to restore your {firmware.hw_type_label}. + If you're unsure, start with Firmware Only. + + +
+ + + + } + selected={flashType === FLASH_TYPE_FW_ONLY} + onClick={() => setFlashType(FLASH_TYPE_FW_ONLY)} + /> + + + + } + selected={flashType === FLASH_TYPE_FULL} + onClick={() => setFlashType(FLASH_TYPE_FULL)} + /> +
+ + {/* Serial number field — only shown for full wipe */} + {isFullWipe && ( +
+ +

+ Your serial number is printed on the sticker on the bottom of your device.

+ It looks something like: BSVSPR-28F17R-PRO10R-4UAQPF. +

Enter it exactly as shown, then click outside the box to verify. +

+
+ { setSerial(e.target.value.toUpperCase()); setSerialError(""); setSerialValid(false); }} + onBlur={handleSerialBlur} + placeholder="BSXXXX-XXXXXX-XXXXXX-XXXXXX" + className="w-full px-3 py-2.5 rounded-md text-sm border font-mono pr-10" + style={{ backgroundColor: "var(--bg-input)", borderColor: inputBorderColor, color: "var(--text-primary)" }} + spellCheck={false} + disabled={validating} + /> +
+ {validating && ( + + + + + )} + {!validating && serialValid && ( + + + + )} + {!validating && serialError && ( + + + + )} +
+
+ {serialError && ( +

{serialError}

+ )} + {serialValid && ( +

+ ✓ Serial number verified — device found in the BellSystems database. +

+ )} +
+ )} + +
+ + {validating ? "Verifying serial…" : "Continue →"} + +
+
+ ); +} + +function FlashTypeCard({ title, subtitle, description, badge, badgeColor, badgeBg, icon, selected, onClick }) { + return ( + + ); +} + +// ─── Step 4: Connect & Flash ────────────────────────────────────────────────── + +function StepFlash({ firmware, flashType, serial, onDone }) { + const [phase, setPhase] = useState("connect"); // connect | flashing | done + const [connecting, setConnecting] = useState(false); + const [portName, setPortName] = useState(""); + const [portConnected, setPortConnected] = useState(false); + const [blPct, setBlPct] = useState(0); + const [partPct, setPartPct] = useState(0); + const [nvsPct, setNvsPct] = useState(0); + const [fwPct, setFwPct] = useState(0); + const [error, setError] = useState(""); + const [log, setLog] = useState([]); + + const portRef = useRef(null); + const loaderRef = useRef(null); + const logEndRef = useRef(null); + + const appendLog = (msg) => { + setLog((prev) => [...prev, String(msg)]); + setTimeout(() => logEndRef.current?.scrollIntoView({ behavior: "smooth" }), 50); + }; + + const webSerialAvailable = "serial" in navigator; + + const handleConnect = async () => { + setError(""); + setConnecting(true); + try { + const port = await navigator.serial.requestPort(); + const info = port.getInfo?.() || {}; + const label = info.usbVendorId + ? 
`USB ${info.usbVendorId.toString(16).toUpperCase()}:${(info.usbProductId || 0).toString(16).toUpperCase()}` + : "Serial Port"; + portRef.current = port; + setPortName(label); + setPortConnected(true); + } catch (err) { + if (err.name !== "NotFoundError") { + setError(err.message || "Port selection failed."); + } + } finally { + setConnecting(false); + } + }; + + const handleFlash = async () => { + if (!portRef.current) return; + setError(""); + setLog([]); + setBlPct(0); setPartPct(0); setNvsPct(0); setFwPct(0); + setPhase("flashing"); + + try { + appendLog("Fetching firmware files from BellSystems servers…"); + + let blBuffer = null, partBuffer = null, nvsBuffer = null; + + if (flashType === FLASH_TYPE_FULL) { + appendLog("Downloading bootloader…"); + blBuffer = await fetchBinary(`/api/public/cloudflash/${firmware.hw_type}/bootloader.bin`); + appendLog(`Bootloader: ${blBuffer.byteLength} bytes`); + + appendLog("Downloading partition table…"); + partBuffer = await fetchBinary(`/api/public/cloudflash/${firmware.hw_type}/partitions.bin`); + appendLog(`Partition table: ${partBuffer.byteLength} bytes`); + + appendLog("Generating NVS identity data…"); + nvsBuffer = await postForBinary("/api/public/cloudflash/nvs.bin", { + serial_number: serial, + hw_type: firmware.hw_type, + hw_revision: "1.0", + }); + appendLog(`NVS: ${nvsBuffer.byteLength} bytes`); + } + + appendLog("Downloading firmware…"); + const fwBuffer = await fetchBinary(firmware.download_url); + appendLog(`Firmware: ${fwBuffer.byteLength} bytes`); + + appendLog("Connecting to your device…"); + const transport = new Transport(portRef.current, true); + loaderRef.current = new ESPLoader({ + transport, + baudrate: FLASH_BAUD, + terminal: { + clean() {}, + writeLine: (line) => { appendLog(line); }, + write: (msg) => { appendLog(msg); }, + }, + }); + + await loaderRef.current.main(); + appendLog("Device connected. Starting flash…"); + + const fileArray = flashType === FLASH_TYPE_FULL + ? 
[ + { data: arrayBufferToString(blBuffer), address: 0x1000 }, + { data: arrayBufferToString(partBuffer), address: 0x8000 }, + { data: arrayBufferToString(nvsBuffer), address: NVS_ADDRESS }, + { data: arrayBufferToString(fwBuffer), address: FW_ADDRESS }, + ] + : [ + { data: arrayBufferToString(fwBuffer), address: FW_ADDRESS }, + ]; + + await loaderRef.current.writeFlash({ + fileArray, + flashSize: "keep", flashMode: "keep", flashFreq: "keep", + eraseAll: false, compress: true, + reportProgress(fileIndex, written, total) { + const pct = (written / total) * 100; + if (flashType === FLASH_TYPE_FULL) { + if (fileIndex === 0) { setBlPct(pct); } + else if (fileIndex === 1) { setBlPct(100); setPartPct(pct); } + else if (fileIndex === 2) { setPartPct(100); setNvsPct(pct); } + else { setNvsPct(100); setFwPct(pct); } + } else { + setFwPct(pct); + } + }, + calculateMD5Hash: () => "", + }); + + if (flashType === FLASH_TYPE_FULL) { + setBlPct(100); setPartPct(100); setNvsPct(100); + } + setFwPct(100); + appendLog("Flash complete! Rebooting device…"); + + // Hard reset via RTS + try { + const t = loaderRef.current.transport; + await t.setRTS(true); + await new Promise((r) => setTimeout(r, 100)); + await t.setRTS(false); + } catch (_) {} + + try { await loaderRef.current.transport.disconnect(); } catch (_) {} + + setPhase("done"); + onDone({ serial }); + } catch (err) { + setError(err.message || String(err)); + setPhase("connect"); + } + }; + + const isFullWipe = flashType === FLASH_TYPE_FULL; + const flashing = phase === "flashing"; + + return ( +
+ Connect & Flash + + Connect your {firmware.hw_type_label} via USB, + then click "Select Port" to choose it. + {isFullWipe && serial && ( + <> Your serial number {serial} will be written to the device. + )} + + + {!webSerialAvailable && ( +
+ + Browser not supported. CloudFlash requires Google Chrome or Microsoft Edge on a desktop + computer. Safari, Firefox, and mobile browsers are not supported. + +
+ )} + + {error && ( +
+ + Error: {error} + +
+ )} + + {/* Port status */} +
+
+ + + {portConnected ? portName || "Device connected" : "No device selected"} + +
+ {!portConnected && ( + + )} +
+ + {/* How-to hint */} + {!portConnected && !flashing && ( +
+

How to connect your device:

+ {[ + "Plug your BellSystems device into your computer using a USB cable.", + "Click the \"Select Port\" button above and a browser popup will appear.", + "Look for an entry that says \"CP210x\", \"CH340\" or \"USB Serial (COM#)\" and click it.", + "If you don't see your device, try a different USB cable or port.", + ].map((step, i) => ( +
+ {i + 1}. + {step} +
+ ))} +
+ )} + + {/* Progress bars */} + {(flashing || fwPct > 0) && ( +
+ {isFullWipe && ( + <> + + + + + )} + +
+ )} + + {flashing && ( +
+ + Please do not disconnect your device or close this tab while flashing is in progress. + This could permanently damage your device. + +
+ )} + + {/* Flash log */} + {log.length > 0 && ( +
+ {log.map((line, i) =>
{line}
)} +
+
+ )} + +
+ + {flashing ? "Flashing… please wait" : "Start Flash →"} + +
+
+ ); +} + +// ─── Step 5: Done ───────────────────────────────────────────────────────────── + +function StepDone({ firmware, flashType, serial, onReset }) { + const [verifyStatus, setVerifyStatus] = useState("waiting"); // waiting | online | timeout + const isFullWipe = flashType === FLASH_TYPE_FULL; + + useEffect(() => { + if (!isFullWipe || !serial) { setVerifyStatus("skipped"); return; } + + let elapsed = 0; + const interval = setInterval(async () => { + elapsed += VERIFY_POLL_MS; + if (elapsed > VERIFY_TIMEOUT_MS) { + clearInterval(interval); + setVerifyStatus("timeout"); + return; + } + try { + // We poll the public status endpoint — the device heartbeat will show if it's online + const resp = await fetch(`/api/public/cloudflash/status`); + // We can't directly check MQTT status without auth, so we just wait for the + // device to appear via a public heartbeat check. For now we show a friendly + // "waiting" message and let it time out gracefully. + // If you have a public device-online endpoint, swap it in here. + } catch (_) {} + }, VERIFY_POLL_MS); + + return () => clearInterval(interval); + }, [isFullWipe, serial]); + + return ( +
+ {/* Success icon */} +
+ + + +
+ +

+ Flash Complete! +

+

+ Your {firmware.hw_type_label} has been + successfully {flashType === FLASH_TYPE_FULL ? "restored to factory settings" : "updated"}. + The device is now rebooting. +

+ + {isFullWipe && ( +
+ {verifyStatus === "waiting" && ( +
+ + + Waiting for device to connect to BellCloud… + +
+ )} + {verifyStatus === "timeout" && ( + + We couldn't confirm your device connected automatically. + This is normal — it may take a few minutes for the device to join the network. + You can close this page. + + )} + {verifyStatus === "skipped" && null} +
+ )} + +
+

+ What to do next +

+ {[ + "You can safely disconnect the USB cable.", + "Power cycle the device if it doesn't restart automatically.", + "The device will reconnect to your network and BellCloud automatically.", + "If you experience any issues, please contact BellSystems support.", + ].map((item, i) => ( +
+ {i + 1}. + {item} +
+ ))} +
+ + + Flash another device + +
+ ); +} + +// ─── Disabled / Feature Gate Screen ────────────────────────────────────────── + +function DisabledScreen() { + return ( +
+
+
+ + + +
+

+ CloudFlash is Currently Unavailable +

+

+ The CloudFlash service is temporarily disabled. Please check back later or contact + BellSystems support if you need urgent assistance with your device. +

+
+
+ ); +} + +// ─── Main CloudFlash Page ───────────────────────────────────────────────────── + +export default function CloudFlashPage() { + const [gateLoading, setGateLoading] = useState(true); + const [enabled, setEnabled] = useState(false); + + // Wizard state + const [step, setStep] = useState(1); + const [firmware, setFirmware] = useState(null); // { hw_type, hw_type_label, channel, version, download_url } + const [flashType, setFlashType] = useState(null); // FLASH_TYPE_FULL | FLASH_TYPE_FW_ONLY + const [serial, setSerial] = useState(null); // user-provided serial (full wipe only) + + // Check feature gate + useEffect(() => { + fetch("/api/public/cloudflash/status") + .then((r) => r.json()) + .then((data) => setEnabled(data.enabled ?? false)) + .catch(() => setEnabled(false)) + .finally(() => setGateLoading(false)); + }, []); + + const reset = () => { + setStep(1); + setFirmware(null); + setFlashType(null); + setSerial(null); + }; + + if (gateLoading) { + return ( +
+

Loading…

+
+ ); + } + + if (!enabled) return ; + + return ( +
+ {/* Brand header */} +
+ CloudFlash +
+ + {/* Step indicator */} +
+ +
+ + {/* Wizard card */} +
+ {step === 1 && ( + setStep(2)} /> + )} + {step === 2 && ( + { setFirmware(fw); setStep(3); }} + /> + )} + {step === 3 && firmware && ( + { + setFlashType(ft); + setSerial(sn); + setStep(4); + }} + /> + )} + {step === 4 && firmware && flashType && ( + setStep(5)} + /> + )} + {step === 5 && firmware && flashType && ( + + )} +
+ + {/* Footer */} +

+ © {new Date().getFullYear()} BellSystems · CloudFlash v1.0 +

+
+ ); +} diff --git a/frontend/src/crm/customers/CustomerDetail.jsx b/frontend/src/crm/customers/CustomerDetail.jsx index 09a817d..65e947a 100644 --- a/frontend/src/crm/customers/CustomerDetail.jsx +++ b/frontend/src/crm/customers/CustomerDetail.jsx @@ -1188,7 +1188,7 @@ export default function CustomerDetail() {
{customer.notes.map((note, i) => (
-

{note.text}

+

{note.text}

{note.by} · {note.at ? new Date(note.at).toLocaleDateString() : ""}

diff --git a/frontend/src/devices/DeviceDetail.jsx b/frontend/src/devices/DeviceDetail.jsx index 0adb59a..adff671 100644 --- a/frontend/src/devices/DeviceDetail.jsx +++ b/frontend/src/devices/DeviceDetail.jsx @@ -1556,6 +1556,7 @@ const TAB_DEFS = [ { id: "bells", label: "Bell Mechanisms", tone: "bells" }, { id: "clock", label: "Clock & Alerts", tone: "clock" }, { id: "warranty", label: "Warranty & Subscription", tone: "warranty" }, + { id: "manage", label: "Manage", tone: "manage" }, { id: "control", label: "Control", tone: "control" }, ]; @@ -1574,6 +1575,104 @@ function calcMaintenanceProgress(lastDate, periodDays) { return Math.max(0, Math.min(100, (elapsed / total) * 100)); } +// ─── Customer Assign Modal ──────────────────────────────────────────────────── +function CustomerAssignModal({ deviceId, onSelect, onCancel }) { + const [query, setQuery] = useState(""); + const [results, setResults] = useState([]); + const [searching, setSearching] = useState(false); + const inputRef = useRef(null); + + useEffect(() => { inputRef.current?.focus(); }, []); + + const search = useCallback(async (q) => { + setSearching(true); + try { + const data = await api.get(`/devices/${deviceId}/customer-search?q=${encodeURIComponent(q)}`); + setResults(data.results || []); + } catch { + setResults([]); + } finally { + setSearching(false); + } + }, [deviceId]); + + useEffect(() => { + const t = setTimeout(() => search(query), 250); + return () => clearTimeout(t); + }, [query, search]); + + return ( +
+
+
+

Assign to Customer

+ +
+
+
+ setQuery(e.target.value)} + placeholder="Search by name, email, phone, org, tags…" + className="w-full px-3 py-2 rounded-md text-sm border" + style={{ backgroundColor: "var(--bg-input)", borderColor: "var(--border-input)", color: "var(--text-primary)" }} + /> + {searching && ( + + )} +
+
+
+
+ {results.length === 0 ? ( +

+ {searching ? "Searching…" : query ? "No customers found." : "Type to search customers…"} +

+ ) : ( + results.map((c) => ( + + )) + )} +
+
+
+ +
+
+
+ ); +} + export default function DeviceDetail() { const { id } = useParams(); const navigate = useNavigate(); @@ -1630,18 +1729,29 @@ export default function DeviceDetail() { const d = await api.get(`/devices/${id}`); setDevice(d); if (d.staffNotes) setStaffNotes(d.staffNotes); + if (Array.isArray(d.tags)) setTags(d.tags); setLoading(false); // Phase 2: fire async background fetches — do not block the render - if (d.device_id) { + const deviceSN = d.serial_number || d.device_id; + if (deviceSN) { api.get("/mqtt/status").then((mqttData) => { if (mqttData?.devices) { - const match = mqttData.devices.find((s) => s.device_serial === d.device_id); + const match = mqttData.devices.find((s) => s.device_serial === deviceSN); setMqttStatus(match || null); } }).catch(() => {}); } + // Fetch owner customer details + if (d.customer_id) { + api.get(`/devices/${id}/customer`).then((res) => { + setOwnerCustomer(res.customer || null); + }).catch(() => setOwnerCustomer(null)); + } else { + setOwnerCustomer(null); + } + setUsersLoading(true); api.get(`/devices/${id}/users`).then((data) => { setDeviceUsers(data.users || []); @@ -1650,9 +1760,9 @@ export default function DeviceDetail() { }).finally(() => setUsersLoading(false)); // Fetch manufacturing record + product catalog to resolve hw image - if (d.device_id) { + if (deviceSN) { Promise.all([ - api.get(`/manufacturing/devices/${d.device_id}`).catch(() => null), + api.get(`/manufacturing/devices/${deviceSN}`).catch(() => null), api.get("/crm/products").catch(() => null), ]).then(([mfgItem, productsRes]) => { const hwType = mfgItem?.hw_type || ""; @@ -1719,6 +1829,85 @@ export default function DeviceDetail() { } }; + // --- Device Notes handlers --- + const handleAddNote = async () => { + if (!newNoteText.trim()) return; + setSavingNote(true); + try { + const data = await api.post(`/devices/${id}/notes`, { + content: newNoteText.trim(), + created_by: "admin", + }); + setDeviceNotes((prev) => [data, ...prev]); + setNewNoteText(""); 
+ setAddingNote(false); + } catch {} finally { setSavingNote(false); } + }; + + const handleUpdateNote = async (noteId) => { + if (!editingNoteText.trim()) return; + setSavingNote(true); + try { + const data = await api.put(`/devices/${id}/notes/${noteId}`, { content: editingNoteText.trim() }); + setDeviceNotes((prev) => prev.map((n) => n.id === noteId ? data : n)); + setEditingNoteId(null); + } catch {} finally { setSavingNote(false); } + }; + + const handleDeleteNote = async (noteId) => { + if (!window.confirm("Delete this note?")) return; + try { + await api.delete(`/devices/${id}/notes/${noteId}`); + setDeviceNotes((prev) => prev.filter((n) => n.id !== noteId)); + } catch {} + }; + + // --- Tags handlers --- + const handleAddTag = async (tag) => { + const trimmed = tag.trim(); + if (!trimmed || tags.includes(trimmed)) return; + const next = [...tags, trimmed]; + setSavingTags(true); + try { + await api.put(`/devices/${id}/tags`, { tags: next }); + setTags(next); + setTagInput(""); + } catch {} finally { setSavingTags(false); } + }; + + const handleRemoveTag = async (tag) => { + const next = tags.filter((t) => t !== tag); + setSavingTags(true); + try { + await api.put(`/devices/${id}/tags`, { tags: next }); + setTags(next); + } catch {} finally { setSavingTags(false); } + }; + + // --- Customer assign handlers --- + const handleAssignCustomer = async (customer) => { + setAssigningCustomer(true); + try { + await api.post(`/devices/${id}/assign-customer`, { customer_id: customer.id }); + setDevice((prev) => ({ ...prev, customer_id: customer.id })); + setOwnerCustomer(customer); + setShowAssignSearch(false); + setCustomerSearch(""); + setCustomerResults([]); + } catch {} finally { setAssigningCustomer(false); } + }; + + const handleUnassignCustomer = async () => { + if (!window.confirm("Remove customer assignment?")) return; + setAssigningCustomer(true); + try { + const cid = device?.customer_id; + await api.delete(`/devices/${id}/assign-customer${cid ? 
`?customer_id=${cid}` : ""}`); + setDevice((prev) => ({ ...prev, customer_id: "" })); + setOwnerCustomer(null); + } catch {} finally { setAssigningCustomer(false); } + }; + const requestStrikeCounters = useCallback(async (force = false) => { if (!device?.device_id) return; const now = Date.now(); @@ -1800,6 +1989,66 @@ export default function DeviceDetail() { return () => clearInterval(interval); }, [ctrlCmdAutoRefresh, device?.device_id, fetchCtrlCmdHistory]); + // --- Device Notes state (MUST be before early returns) --- + const [deviceNotes, setDeviceNotes] = useState([]); + const [notesLoaded, setNotesLoaded] = useState(false); + const [addingNote, setAddingNote] = useState(false); + const [newNoteText, setNewNoteText] = useState(""); + const [savingNote, setSavingNote] = useState(false); + const [editingNoteId, setEditingNoteId] = useState(null); + const [editingNoteText, setEditingNoteText] = useState(""); + + const loadDeviceNotes = useCallback(async () => { + try { + const data = await api.get(`/devices/${id}/notes`); + setDeviceNotes(data.notes || []); + setNotesLoaded(true); + } catch { + setNotesLoaded(true); + } + }, [id]); + + useEffect(() => { + if (id) loadDeviceNotes(); + }, [id, loadDeviceNotes]); + + // --- Tags state (MUST be before early returns) --- + const [tags, setTags] = useState([]); + const [tagInput, setTagInput] = useState(""); + const [savingTags, setSavingTags] = useState(false); + + // --- Customer assign state (MUST be before early returns) --- + const [assigningCustomer, setAssigningCustomer] = useState(false); + const [showAssignSearch, setShowAssignSearch] = useState(false); + const [ownerCustomer, setOwnerCustomer] = useState(null); + + // --- User assignment state (MUST be before early returns) --- + const [showUserSearch, setShowUserSearch] = useState(false); + const [userSearchQuery, setUserSearchQuery] = useState(""); + const [userSearchResults, setUserSearchResults] = useState([]); + const [userSearching, setUserSearching] 
= useState(false); + const [addingUser, setAddingUser] = useState(null); + const [removingUser, setRemovingUser] = useState(null); + const userSearchInputRef = useRef(null); + + const searchUsers = useCallback(async (q) => { + setUserSearching(true); + try { + const data = await api.get(`/devices/${id}/user-search?q=${encodeURIComponent(q)}`); + setUserSearchResults(data.results || []); + } catch { + setUserSearchResults([]); + } finally { + setUserSearching(false); + } + }, [id]); + + useEffect(() => { + if (!showUserSearch) return; + const t = setTimeout(() => searchUsers(userSearchQuery), 250); + return () => clearTimeout(t); + }, [userSearchQuery, searchUsers, showUserSearch]); + if (loading) return
Loading...
; if (error) return (
@@ -1952,7 +2201,7 @@ export default function DeviceDetail() {
SERIAL NUMBER - {device.device_id || "-"} + {device.serial_number || device.device_id || "-"}
@@ -2115,7 +2364,7 @@ export default function DeviceDetail() {
- +
@@ -2253,6 +2502,260 @@ export default function DeviceDetail() { + + {/* ── Tags ── */} + +
+ {tags.length === 0 && ( + No tags yet. + )} + {tags.map((tag) => ( + + {tag} + {canEdit && ( + + )} + + ))} +
+ {canEdit && ( +
+ setTagInput(e.target.value)} + onKeyDown={(e) => { if (e.key === "Enter") { e.preventDefault(); handleAddTag(tagInput); } }} + placeholder="Add tag and press Enter…" + className="px-3 py-1.5 rounded-md text-sm border flex-1" + style={{ backgroundColor: "var(--bg-input)", borderColor: "var(--border-input)", color: "var(--text-primary)" }} + /> + +
+ )} +
+ + {/* ── Owner ── */} + + {device.customer_id ? ( +
+ {ownerCustomer ? ( +
navigate(`/crm/customers/${device.customer_id}`)} + title="View customer" + > +
+ {(ownerCustomer.name || "?")[0].toUpperCase()} +
+
+

{ownerCustomer.name || "—"}

+ {ownerCustomer.organization && ( +

{ownerCustomer.organization}

+ )} +
+ + + +
+ ) : ( +

Customer assigned (loading details…)

+ )} + {canEdit && ( +
+ + +
+ )} +
+ ) : ( +
+

No customer assigned yet.

+ {canEdit && ( + + )} +
+ )} + + {showAssignSearch && ( + { setShowAssignSearch(false); handleAssignCustomer(c); }} + onCancel={() => setShowAssignSearch(false)} + /> + )} +
+ + {/* ── Device Notes ── */} + + {!notesLoaded ? ( +

Loading…

+ ) : ( + <> + {deviceNotes.length === 0 && !addingNote && ( +

No notes for this device.

+ )} +
+ {deviceNotes.map((note) => ( +
+ {editingNoteId === note.id ? ( +
+