update: overhauled firmware ui. Added public flash page.
This commit is contained in:
@@ -5,23 +5,34 @@ from database import get_db
|
||||
logger = logging.getLogger("builder.database")
|
||||
|
||||
|
||||
async def insert_built_melody(melody_id: str, name: str, pid: str, steps: str) -> None:
|
||||
async def insert_built_melody(melody_id: str, name: str, pid: str, steps: str, is_builtin: bool = False) -> None:
|
||||
db = await get_db()
|
||||
await db.execute(
|
||||
"""INSERT INTO built_melodies (id, name, pid, steps, assigned_melody_ids)
|
||||
VALUES (?, ?, ?, ?, ?)""",
|
||||
(melody_id, name, pid, steps, json.dumps([])),
|
||||
"""INSERT INTO built_melodies (id, name, pid, steps, assigned_melody_ids, is_builtin)
|
||||
VALUES (?, ?, ?, ?, ?, ?)""",
|
||||
(melody_id, name, pid, steps, json.dumps([]), 1 if is_builtin else 0),
|
||||
)
|
||||
await db.commit()
|
||||
|
||||
|
||||
async def update_built_melody(melody_id: str, name: str, pid: str, steps: str) -> None:
|
||||
async def update_built_melody(melody_id: str, name: str, pid: str, steps: str, is_builtin: bool = False) -> None:
|
||||
db = await get_db()
|
||||
await db.execute(
|
||||
"""UPDATE built_melodies
|
||||
SET name = ?, pid = ?, steps = ?, updated_at = datetime('now')
|
||||
SET name = ?, pid = ?, steps = ?, is_builtin = ?, updated_at = datetime('now')
|
||||
WHERE id = ?""",
|
||||
(name, pid, steps, melody_id),
|
||||
(name, pid, steps, 1 if is_builtin else 0, melody_id),
|
||||
)
|
||||
await db.commit()
|
||||
|
||||
|
||||
async def update_builtin_flag(melody_id: str, is_builtin: bool) -> None:
|
||||
db = await get_db()
|
||||
await db.execute(
|
||||
"""UPDATE built_melodies
|
||||
SET is_builtin = ?, updated_at = datetime('now')
|
||||
WHERE id = ?""",
|
||||
(1 if is_builtin else 0, melody_id),
|
||||
)
|
||||
await db.commit()
|
||||
|
||||
@@ -68,6 +79,7 @@ async def get_built_melody(melody_id: str) -> dict | None:
|
||||
return None
|
||||
row = dict(rows[0])
|
||||
row["assigned_melody_ids"] = json.loads(row["assigned_melody_ids"] or "[]")
|
||||
row["is_builtin"] = bool(row.get("is_builtin", 0))
|
||||
return row
|
||||
|
||||
|
||||
@@ -80,6 +92,7 @@ async def list_built_melodies() -> list[dict]:
|
||||
for row in rows:
|
||||
r = dict(row)
|
||||
r["assigned_melody_ids"] = json.loads(r["assigned_melody_ids"] or "[]")
|
||||
r["is_builtin"] = bool(r.get("is_builtin", 0))
|
||||
results.append(r)
|
||||
return results
|
||||
|
||||
|
||||
@@ -6,12 +6,14 @@ class BuiltMelodyCreate(BaseModel):
|
||||
name: str
|
||||
pid: str
|
||||
steps: str # raw step string e.g. "1,2,2+1,1,2,3+1"
|
||||
is_builtin: bool = False
|
||||
|
||||
|
||||
class BuiltMelodyUpdate(BaseModel):
|
||||
name: Optional[str] = None
|
||||
pid: Optional[str] = None
|
||||
steps: Optional[str] = None
|
||||
is_builtin: Optional[bool] = None
|
||||
|
||||
|
||||
class BuiltMelodyInDB(BaseModel):
|
||||
@@ -19,6 +21,7 @@ class BuiltMelodyInDB(BaseModel):
|
||||
name: str
|
||||
pid: str
|
||||
steps: str
|
||||
is_builtin: bool = False
|
||||
binary_path: Optional[str] = None
|
||||
binary_url: Optional[str] = None
|
||||
progmem_code: Optional[str] = None
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi.responses import FileResponse
|
||||
from fastapi.responses import FileResponse, PlainTextResponse
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from builder.models import (
|
||||
@@ -20,6 +20,7 @@ async def list_built_melodies(
|
||||
melodies = await service.list_built_melodies()
|
||||
return BuiltMelodyListResponse(melodies=melodies, total=len(melodies))
|
||||
|
||||
|
||||
@router.get("/for-melody/{firestore_melody_id}")
|
||||
async def get_for_firestore_melody(
|
||||
firestore_melody_id: str,
|
||||
@@ -32,6 +33,14 @@ async def get_for_firestore_melody(
|
||||
return result.model_dump()
|
||||
|
||||
|
||||
@router.get("/generate-builtin-list")
|
||||
async def generate_builtin_list(
|
||||
_user: TokenPayload = Depends(require_permission("melodies", "view")),
|
||||
):
|
||||
"""Generate a C++ header with PROGMEM arrays for all is_builtin archetypes."""
|
||||
code = await service.generate_builtin_list()
|
||||
return PlainTextResponse(content=code, media_type="text/plain")
|
||||
|
||||
|
||||
@router.get("/{melody_id}", response_model=BuiltMelodyInDB)
|
||||
async def get_built_melody(
|
||||
@@ -66,6 +75,15 @@ async def delete_built_melody(
|
||||
await service.delete_built_melody(melody_id)
|
||||
|
||||
|
||||
@router.post("/{melody_id}/toggle-builtin", response_model=BuiltMelodyInDB)
|
||||
async def toggle_builtin(
|
||||
melody_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
||||
):
|
||||
"""Toggle the is_builtin flag for an archetype."""
|
||||
return await service.toggle_builtin(melody_id)
|
||||
|
||||
|
||||
@router.post("/{melody_id}/build-binary", response_model=BuiltMelodyInDB)
|
||||
async def build_binary(
|
||||
melody_id: str,
|
||||
|
||||
@@ -32,6 +32,7 @@ def _row_to_built_melody(row: dict) -> BuiltMelodyInDB:
|
||||
name=row["name"],
|
||||
pid=row["pid"],
|
||||
steps=row["steps"],
|
||||
is_builtin=row.get("is_builtin", False),
|
||||
binary_path=binary_path,
|
||||
binary_url=binary_url,
|
||||
progmem_code=row.get("progmem_code"),
|
||||
@@ -151,8 +152,12 @@ async def create_built_melody(data: BuiltMelodyCreate) -> BuiltMelodyInDB:
|
||||
name=data.name,
|
||||
pid=data.pid,
|
||||
steps=data.steps,
|
||||
is_builtin=data.is_builtin,
|
||||
)
|
||||
return await get_built_melody(melody_id)
|
||||
# Auto-build binary and builtin code on creation
|
||||
result = await get_built_melody(melody_id)
|
||||
result = await _do_build(melody_id)
|
||||
return result
|
||||
|
||||
|
||||
async def update_built_melody(melody_id: str, data: BuiltMelodyUpdate) -> BuiltMelodyInDB:
|
||||
@@ -163,11 +168,22 @@ async def update_built_melody(melody_id: str, data: BuiltMelodyUpdate) -> BuiltM
|
||||
new_name = data.name if data.name is not None else row["name"]
|
||||
new_pid = data.pid if data.pid is not None else row["pid"]
|
||||
new_steps = data.steps if data.steps is not None else row["steps"]
|
||||
new_is_builtin = data.is_builtin if data.is_builtin is not None else row.get("is_builtin", False)
|
||||
|
||||
await _check_unique(new_name, new_pid or "", exclude_id=melody_id)
|
||||
|
||||
await db.update_built_melody(melody_id, name=new_name, pid=new_pid, steps=new_steps)
|
||||
return await get_built_melody(melody_id)
|
||||
steps_changed = (data.steps is not None) and (data.steps != row["steps"])
|
||||
|
||||
await db.update_built_melody(melody_id, name=new_name, pid=new_pid, steps=new_steps, is_builtin=new_is_builtin)
|
||||
|
||||
# If steps changed, flag all assigned melodies as outdated, then rebuild
|
||||
if steps_changed:
|
||||
assigned_ids = row.get("assigned_melody_ids", [])
|
||||
if assigned_ids:
|
||||
await _flag_melodies_outdated(assigned_ids, True)
|
||||
|
||||
# Auto-rebuild binary and builtin code on every save
|
||||
return await _do_build(melody_id)
|
||||
|
||||
|
||||
async def delete_built_melody(melody_id: str) -> None:
|
||||
@@ -175,6 +191,11 @@ async def delete_built_melody(melody_id: str) -> None:
|
||||
if not row:
|
||||
raise HTTPException(status_code=404, detail=f"Built melody '{melody_id}' not found")
|
||||
|
||||
# Flag all assigned melodies as outdated before deleting
|
||||
assigned_ids = row.get("assigned_melody_ids", [])
|
||||
if assigned_ids:
|
||||
await _flag_melodies_outdated(assigned_ids, True)
|
||||
|
||||
# Delete the .bsm file if it exists
|
||||
if row.get("binary_path"):
|
||||
bsm_path = Path(row["binary_path"])
|
||||
@@ -184,10 +205,26 @@ async def delete_built_melody(melody_id: str) -> None:
|
||||
await db.delete_built_melody(melody_id)
|
||||
|
||||
|
||||
async def toggle_builtin(melody_id: str) -> BuiltMelodyInDB:
|
||||
"""Toggle the is_builtin flag for an archetype."""
|
||||
row = await db.get_built_melody(melody_id)
|
||||
if not row:
|
||||
raise HTTPException(status_code=404, detail=f"Built melody '{melody_id}' not found")
|
||||
new_value = not row.get("is_builtin", False)
|
||||
await db.update_builtin_flag(melody_id, new_value)
|
||||
return await get_built_melody(melody_id)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Build Actions
|
||||
# ============================================================================
|
||||
|
||||
async def _do_build(melody_id: str) -> BuiltMelodyInDB:
|
||||
"""Internal: build both binary and PROGMEM code, return updated record."""
|
||||
await build_binary(melody_id)
|
||||
return await build_builtin_code(melody_id)
|
||||
|
||||
|
||||
async def build_binary(melody_id: str) -> BuiltMelodyInDB:
|
||||
"""Parse steps and write a .bsm binary file to storage."""
|
||||
row = await db.get_built_melody(melody_id)
|
||||
@@ -236,6 +273,48 @@ async def get_binary_path(melody_id: str) -> Optional[Path]:
|
||||
return path
|
||||
|
||||
|
||||
async def generate_builtin_list() -> str:
|
||||
"""Generate a C++ header with PROGMEM arrays for all is_builtin archetypes."""
|
||||
rows = await db.list_built_melodies()
|
||||
builtin_rows = [r for r in rows if r.get("is_builtin")]
|
||||
|
||||
if not builtin_rows:
|
||||
return "// No built-in archetypes defined.\n"
|
||||
|
||||
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
parts = [
|
||||
f"// Auto-generated Built-in Archetype List",
|
||||
f"// Generated: {timestamp}",
|
||||
f"// Total built-ins: {len(builtin_rows)}",
|
||||
"",
|
||||
"#pragma once",
|
||||
"#include <avr/pgmspace.h>",
|
||||
"",
|
||||
]
|
||||
|
||||
entry_refs = []
|
||||
for row in builtin_rows:
|
||||
values = steps_string_to_values(row["steps"])
|
||||
array_name = f"melody_builtin_{row['name'].lower().replace(' ', '_')}"
|
||||
display_name = row["name"].replace("_", " ").title()
|
||||
pid = row.get("pid") or f"builtin_{row['name'].lower()}"
|
||||
|
||||
parts.append(f"// {display_name} | PID: {pid} | Steps: {len(values)}")
|
||||
parts.append(format_melody_array(row["name"].lower().replace(" ", "_"), values))
|
||||
parts.append("")
|
||||
entry_refs.append((display_name, pid, array_name, len(values)))
|
||||
|
||||
# Generate MELODY_LIBRARY array
|
||||
parts.append("// --- MELODY_LIBRARY entries ---")
|
||||
parts.append("// Add these to your firmware's MELODY_LIBRARY[] array:")
|
||||
parts.append("// {")
|
||||
for display_name, pid, array_name, step_count in entry_refs:
|
||||
parts.append(f'// {{ "{display_name}", "{pid}", {array_name}, {step_count} }},')
|
||||
parts.append("// };")
|
||||
|
||||
return "\n".join(parts)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Assignment
|
||||
# ============================================================================
|
||||
@@ -251,6 +330,9 @@ async def assign_to_melody(built_id: str, firestore_melody_id: str) -> BuiltMelo
|
||||
assigned.append(firestore_melody_id)
|
||||
await db.update_assigned_melody_ids(built_id, assigned)
|
||||
|
||||
# Clear outdated flag on the melody being assigned
|
||||
await _flag_melodies_outdated([firestore_melody_id], False)
|
||||
|
||||
return await get_built_melody(built_id)
|
||||
|
||||
|
||||
@@ -262,6 +344,10 @@ async def unassign_from_melody(built_id: str, firestore_melody_id: str) -> Built
|
||||
|
||||
assigned = [mid for mid in row.get("assigned_melody_ids", []) if mid != firestore_melody_id]
|
||||
await db.update_assigned_melody_ids(built_id, assigned)
|
||||
|
||||
# Flag the melody as outdated since it no longer has an archetype
|
||||
await _flag_melodies_outdated([firestore_melody_id], True)
|
||||
|
||||
return await get_built_melody(built_id)
|
||||
|
||||
|
||||
@@ -272,3 +358,48 @@ async def get_built_melody_for_firestore_id(firestore_melody_id: str) -> Optiona
|
||||
if firestore_melody_id in row.get("assigned_melody_ids", []):
|
||||
return _row_to_built_melody(row)
|
||||
return None
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Outdated Flag Helpers
|
||||
# ============================================================================
|
||||
|
||||
async def _flag_melodies_outdated(melody_ids: List[str], outdated: bool) -> None:
|
||||
"""Set or clear the outdated_archetype flag on a list of Firestore melody IDs.
|
||||
|
||||
This updates both SQLite (melody_drafts) and Firestore (published melodies).
|
||||
We import inline to avoid circular imports.
|
||||
"""
|
||||
if not melody_ids:
|
||||
return
|
||||
|
||||
try:
|
||||
from melodies import database as melody_db
|
||||
from shared.firebase import get_db as get_firestore
|
||||
except ImportError:
|
||||
logger.warning("Could not import melody/firebase modules — skipping outdated flag update")
|
||||
return
|
||||
|
||||
firestore_db = get_firestore()
|
||||
|
||||
for melody_id in melody_ids:
|
||||
try:
|
||||
row = await melody_db.get_melody(melody_id)
|
||||
if not row:
|
||||
continue
|
||||
|
||||
data = row["data"]
|
||||
info = dict(data.get("information", {}))
|
||||
info["outdated_archetype"] = outdated
|
||||
data["information"] = info
|
||||
|
||||
await melody_db.update_melody(melody_id, data)
|
||||
|
||||
# If published, also update Firestore
|
||||
if row.get("status") == "published":
|
||||
doc_ref = firestore_db.collection("melodies").document(melody_id)
|
||||
doc_ref.update({"information.outdated_archetype": outdated})
|
||||
|
||||
logger.info(f"Set outdated_archetype={outdated} on melody {melody_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to set outdated flag on melody {melody_id}: {e}")
|
||||
|
||||
@@ -207,6 +207,7 @@ async def init_db():
|
||||
"ALTER TABLE crm_media ADD COLUMN thumbnail_path TEXT",
|
||||
"ALTER TABLE crm_quotation_items ADD COLUMN description_en TEXT",
|
||||
"ALTER TABLE crm_quotation_items ADD COLUMN description_gr TEXT",
|
||||
"ALTER TABLE built_melodies ADD COLUMN is_builtin INTEGER NOT NULL DEFAULT 0",
|
||||
]
|
||||
for m in _migrations:
|
||||
try:
|
||||
|
||||
@@ -126,6 +126,12 @@ class DeviceCreate(BaseModel):
|
||||
websocket_url: str = ""
|
||||
churchAssistantURL: str = ""
|
||||
staffNotes: str = ""
|
||||
hw_family: str = ""
|
||||
hw_revision: str = ""
|
||||
tags: List[str] = []
|
||||
serial_number: str = ""
|
||||
customer_id: str = ""
|
||||
mfg_status: str = ""
|
||||
|
||||
|
||||
class DeviceUpdate(BaseModel):
|
||||
@@ -145,10 +151,16 @@ class DeviceUpdate(BaseModel):
|
||||
websocket_url: Optional[str] = None
|
||||
churchAssistantURL: Optional[str] = None
|
||||
staffNotes: Optional[str] = None
|
||||
hw_family: Optional[str] = None
|
||||
hw_revision: Optional[str] = None
|
||||
tags: Optional[List[str]] = None
|
||||
customer_id: Optional[str] = None
|
||||
mfg_status: Optional[str] = None
|
||||
|
||||
|
||||
class DeviceInDB(DeviceCreate):
|
||||
id: str
|
||||
# Legacy field — kept for backwards compat; new docs use serial_number
|
||||
device_id: str = ""
|
||||
|
||||
|
||||
@@ -157,6 +169,15 @@ class DeviceListResponse(BaseModel):
|
||||
total: int
|
||||
|
||||
|
||||
class DeviceNoteCreate(BaseModel):
|
||||
content: str
|
||||
created_by: str = ""
|
||||
|
||||
|
||||
class DeviceNoteUpdate(BaseModel):
|
||||
content: str
|
||||
|
||||
|
||||
class DeviceUserInfo(BaseModel):
|
||||
"""User info resolved from device_users sub-collection or user_list."""
|
||||
user_id: str = ""
|
||||
|
||||
@@ -1,17 +1,25 @@
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from typing import Optional
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from fastapi import APIRouter, Depends, Query, HTTPException
|
||||
from typing import Optional, List
|
||||
from pydantic import BaseModel
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from devices.models import (
|
||||
DeviceCreate, DeviceUpdate, DeviceInDB, DeviceListResponse,
|
||||
DeviceUsersResponse, DeviceUserInfo,
|
||||
DeviceNoteCreate, DeviceNoteUpdate,
|
||||
)
|
||||
from devices import service
|
||||
import database as mqtt_db
|
||||
from mqtt.models import DeviceAlertEntry, DeviceAlertsResponse
|
||||
from shared.firebase import get_db as get_firestore
|
||||
|
||||
router = APIRouter(prefix="/api/devices", tags=["devices"])
|
||||
|
||||
NOTES_COLLECTION = "notes"
|
||||
CRM_COLLECTION = "crm_customers"
|
||||
|
||||
|
||||
@router.get("", response_model=DeviceListResponse)
|
||||
async def list_devices(
|
||||
@@ -79,3 +87,375 @@ async def get_device_alerts(
|
||||
"""Return the current active alert set for a device. Empty list means fully healthy."""
|
||||
rows = await mqtt_db.get_alerts(device_id)
|
||||
return DeviceAlertsResponse(alerts=[DeviceAlertEntry(**r) for r in rows])
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Device Notes
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{device_id}/notes")
|
||||
async def list_device_notes(
|
||||
device_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||
):
|
||||
"""List all notes for a device."""
|
||||
db = get_firestore()
|
||||
docs = db.collection(NOTES_COLLECTION).where("device_id", "==", device_id).order_by("created_at").stream()
|
||||
notes = []
|
||||
for doc in docs:
|
||||
note = doc.to_dict()
|
||||
note["id"] = doc.id
|
||||
# Convert Firestore Timestamps to ISO strings
|
||||
for f in ("created_at", "updated_at"):
|
||||
if hasattr(note.get(f), "isoformat"):
|
||||
note[f] = note[f].isoformat()
|
||||
notes.append(note)
|
||||
return {"notes": notes, "total": len(notes)}
|
||||
|
||||
|
||||
@router.post("/{device_id}/notes", status_code=201)
|
||||
async def create_device_note(
|
||||
device_id: str,
|
||||
body: DeviceNoteCreate,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Create a new note for a device."""
|
||||
db = get_firestore()
|
||||
now = datetime.utcnow()
|
||||
note_id = str(uuid.uuid4())
|
||||
note_data = {
|
||||
"device_id": device_id,
|
||||
"content": body.content,
|
||||
"created_by": body.created_by or _user.name or "",
|
||||
"created_at": now,
|
||||
"updated_at": now,
|
||||
}
|
||||
db.collection(NOTES_COLLECTION).document(note_id).set(note_data)
|
||||
note_data["id"] = note_id
|
||||
note_data["created_at"] = now.isoformat()
|
||||
note_data["updated_at"] = now.isoformat()
|
||||
return note_data
|
||||
|
||||
|
||||
@router.put("/{device_id}/notes/{note_id}")
|
||||
async def update_device_note(
|
||||
device_id: str,
|
||||
note_id: str,
|
||||
body: DeviceNoteUpdate,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Update an existing device note."""
|
||||
db = get_firestore()
|
||||
doc_ref = db.collection(NOTES_COLLECTION).document(note_id)
|
||||
doc = doc_ref.get()
|
||||
if not doc.exists or doc.to_dict().get("device_id") != device_id:
|
||||
raise HTTPException(status_code=404, detail="Note not found")
|
||||
now = datetime.utcnow()
|
||||
doc_ref.update({"content": body.content, "updated_at": now})
|
||||
updated = doc.to_dict()
|
||||
updated["id"] = note_id
|
||||
updated["content"] = body.content
|
||||
updated["updated_at"] = now.isoformat()
|
||||
if hasattr(updated.get("created_at"), "isoformat"):
|
||||
updated["created_at"] = updated["created_at"].isoformat()
|
||||
return updated
|
||||
|
||||
|
||||
@router.delete("/{device_id}/notes/{note_id}", status_code=204)
|
||||
async def delete_device_note(
|
||||
device_id: str,
|
||||
note_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Delete a device note."""
|
||||
db = get_firestore()
|
||||
doc_ref = db.collection(NOTES_COLLECTION).document(note_id)
|
||||
doc = doc_ref.get()
|
||||
if not doc.exists or doc.to_dict().get("device_id") != device_id:
|
||||
raise HTTPException(status_code=404, detail="Note not found")
|
||||
doc_ref.delete()
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Device Tags
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
class TagsUpdate(BaseModel):
|
||||
tags: List[str]
|
||||
|
||||
|
||||
@router.put("/{device_id}/tags", response_model=DeviceInDB)
|
||||
async def update_device_tags(
|
||||
device_id: str,
|
||||
body: TagsUpdate,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Replace the tags list for a device."""
|
||||
return service.update_device(device_id, DeviceUpdate(tags=body.tags))
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Assign Device to Customer
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
class CustomerSearchResult(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
email: str
|
||||
organization: str = ""
|
||||
|
||||
|
||||
class AssignCustomerBody(BaseModel):
|
||||
customer_id: str
|
||||
label: str = ""
|
||||
|
||||
|
||||
@router.get("/{device_id}/customer-search")
|
||||
async def search_customers_for_device(
|
||||
device_id: str,
|
||||
q: str = Query(""),
|
||||
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||
):
|
||||
"""Search customers by name, email, phone, org, or tags, returning top 20 matches."""
|
||||
db = get_firestore()
|
||||
docs = db.collection(CRM_COLLECTION).stream()
|
||||
results = []
|
||||
q_lower = q.lower().strip()
|
||||
for doc in docs:
|
||||
data = doc.to_dict()
|
||||
name = data.get("name", "") or ""
|
||||
surname = data.get("surname", "") or ""
|
||||
email = data.get("email", "") or ""
|
||||
organization = data.get("organization", "") or ""
|
||||
phone = data.get("phone", "") or ""
|
||||
tags = " ".join(data.get("tags", []) or [])
|
||||
location = data.get("location") or {}
|
||||
city = location.get("city", "") or ""
|
||||
searchable = f"{name} {surname} {email} {organization} {phone} {tags} {city}".lower()
|
||||
if not q_lower or q_lower in searchable:
|
||||
results.append({
|
||||
"id": doc.id,
|
||||
"name": name,
|
||||
"surname": surname,
|
||||
"email": email,
|
||||
"organization": organization,
|
||||
"city": city,
|
||||
})
|
||||
if len(results) >= 20:
|
||||
break
|
||||
return {"results": results}
|
||||
|
||||
|
||||
@router.post("/{device_id}/assign-customer")
|
||||
async def assign_device_to_customer(
|
||||
device_id: str,
|
||||
body: AssignCustomerBody,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Assign a device to a customer.
|
||||
|
||||
- Sets owner field on the device document.
|
||||
- Adds a console_device entry to the customer's owned_items list.
|
||||
"""
|
||||
db = get_firestore()
|
||||
|
||||
# Verify device exists
|
||||
device = service.get_device(device_id)
|
||||
|
||||
# Get customer
|
||||
customer_ref = db.collection(CRM_COLLECTION).document(body.customer_id)
|
||||
customer_doc = customer_ref.get()
|
||||
if not customer_doc.exists:
|
||||
raise HTTPException(status_code=404, detail="Customer not found")
|
||||
customer_data = customer_doc.to_dict()
|
||||
customer_email = customer_data.get("email", "")
|
||||
|
||||
# Update device: owner email + customer_id
|
||||
device_ref = db.collection("devices").document(device_id)
|
||||
device_ref.update({"owner": customer_email, "customer_id": body.customer_id})
|
||||
|
||||
# Add to customer owned_items (avoid duplicates)
|
||||
owned_items = customer_data.get("owned_items", []) or []
|
||||
already_assigned = any(
|
||||
item.get("type") == "console_device" and item.get("console_device", {}).get("device_id") == device_id
|
||||
for item in owned_items
|
||||
)
|
||||
if not already_assigned:
|
||||
owned_items.append({
|
||||
"type": "console_device",
|
||||
"console_device": {
|
||||
"device_id": device_id,
|
||||
"label": body.label or device.device_name or device_id,
|
||||
}
|
||||
})
|
||||
customer_ref.update({"owned_items": owned_items})
|
||||
|
||||
return {"status": "assigned", "device_id": device_id, "customer_id": body.customer_id}
|
||||
|
||||
|
||||
@router.delete("/{device_id}/assign-customer", status_code=204)
|
||||
async def unassign_device_from_customer(
|
||||
device_id: str,
|
||||
customer_id: str = Query(...),
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Remove device assignment from a customer."""
|
||||
db = get_firestore()
|
||||
|
||||
# Clear customer_id on device
|
||||
device_ref = db.collection("devices").document(device_id)
|
||||
device_ref.update({"customer_id": ""})
|
||||
|
||||
# Remove from customer owned_items
|
||||
customer_ref = db.collection(CRM_COLLECTION).document(customer_id)
|
||||
customer_doc = customer_ref.get()
|
||||
if customer_doc.exists:
|
||||
customer_data = customer_doc.to_dict()
|
||||
owned_items = [
|
||||
item for item in (customer_data.get("owned_items") or [])
|
||||
if not (item.get("type") == "console_device" and item.get("console_device", {}).get("device_id") == device_id)
|
||||
]
|
||||
customer_ref.update({"owned_items": owned_items})
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# Customer detail (for Owner display in fleet)
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{device_id}/customer")
|
||||
async def get_device_customer(
|
||||
device_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||
):
|
||||
"""Return basic customer details for a device's assigned customer_id."""
|
||||
db = get_firestore()
|
||||
device_ref = db.collection("devices").document(device_id)
|
||||
device_doc = device_ref.get()
|
||||
if not device_doc.exists:
|
||||
raise HTTPException(status_code=404, detail="Device not found")
|
||||
device_data = device_doc.to_dict() or {}
|
||||
customer_id = device_data.get("customer_id")
|
||||
if not customer_id:
|
||||
return {"customer": None}
|
||||
customer_doc = db.collection(CRM_COLLECTION).document(customer_id).get()
|
||||
if not customer_doc.exists:
|
||||
return {"customer": None}
|
||||
cd = customer_doc.to_dict() or {}
|
||||
return {
|
||||
"customer": {
|
||||
"id": customer_doc.id,
|
||||
"name": cd.get("name") or "",
|
||||
"email": cd.get("email") or "",
|
||||
"organization": cd.get("organization") or "",
|
||||
"phone": cd.get("phone") or "",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
# User list management (for Manage tab — assign/remove users from user_list)
|
||||
# ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
class UserSearchResult(BaseModel):
|
||||
id: str
|
||||
display_name: str = ""
|
||||
email: str = ""
|
||||
phone: str = ""
|
||||
photo_url: str = ""
|
||||
|
||||
|
||||
@router.get("/{device_id}/user-search")
|
||||
async def search_users_for_device(
|
||||
device_id: str,
|
||||
q: str = Query(""),
|
||||
_user: TokenPayload = Depends(require_permission("devices", "view")),
|
||||
):
|
||||
"""Search the users collection by name, email, or phone."""
|
||||
db = get_firestore()
|
||||
docs = db.collection("users").stream()
|
||||
results = []
|
||||
q_lower = q.lower().strip()
|
||||
for doc in docs:
|
||||
data = doc.to_dict() or {}
|
||||
name = (data.get("display_name") or "").lower()
|
||||
email = (data.get("email") or "").lower()
|
||||
phone = (data.get("phone") or "").lower()
|
||||
if not q_lower or q_lower in name or q_lower in email or q_lower in phone:
|
||||
results.append({
|
||||
"id": doc.id,
|
||||
"display_name": data.get("display_name") or "",
|
||||
"email": data.get("email") or "",
|
||||
"phone": data.get("phone") or "",
|
||||
"photo_url": data.get("photo_url") or "",
|
||||
})
|
||||
if len(results) >= 20:
|
||||
break
|
||||
return {"results": results}
|
||||
|
||||
|
||||
class AddUserBody(BaseModel):
|
||||
user_id: str
|
||||
|
||||
|
||||
@router.post("/{device_id}/user-list", status_code=200)
|
||||
async def add_user_to_device(
|
||||
device_id: str,
|
||||
body: AddUserBody,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Add a user reference to the device's user_list field."""
|
||||
db = get_firestore()
|
||||
device_ref = db.collection("devices").document(device_id)
|
||||
device_doc = device_ref.get()
|
||||
if not device_doc.exists:
|
||||
raise HTTPException(status_code=404, detail="Device not found")
|
||||
|
||||
# Verify user exists
|
||||
user_doc = db.collection("users").document(body.user_id).get()
|
||||
if not user_doc.exists:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
data = device_doc.to_dict() or {}
|
||||
user_list = data.get("user_list", []) or []
|
||||
|
||||
# Avoid duplicates — check both string paths and DocumentReferences
|
||||
from google.cloud.firestore_v1 import DocumentReference as DocRef
|
||||
existing_ids = set()
|
||||
for entry in user_list:
|
||||
if isinstance(entry, DocRef):
|
||||
existing_ids.add(entry.id)
|
||||
elif isinstance(entry, str):
|
||||
existing_ids.add(entry.split("/")[-1])
|
||||
|
||||
if body.user_id not in existing_ids:
|
||||
user_ref = db.collection("users").document(body.user_id)
|
||||
user_list.append(user_ref)
|
||||
device_ref.update({"user_list": user_list})
|
||||
|
||||
return {"status": "added", "user_id": body.user_id}
|
||||
|
||||
|
||||
@router.delete("/{device_id}/user-list/{user_id}", status_code=200)
|
||||
async def remove_user_from_device(
|
||||
device_id: str,
|
||||
user_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("devices", "edit")),
|
||||
):
|
||||
"""Remove a user reference from the device's user_list field."""
|
||||
db = get_firestore()
|
||||
device_ref = db.collection("devices").document(device_id)
|
||||
device_doc = device_ref.get()
|
||||
if not device_doc.exists:
|
||||
raise HTTPException(status_code=404, detail="Device not found")
|
||||
|
||||
data = device_doc.to_dict() or {}
|
||||
user_list = data.get("user_list", []) or []
|
||||
|
||||
# Remove any entry that resolves to this user_id
|
||||
new_list = [
|
||||
entry for entry in user_list
|
||||
if not (isinstance(entry, str) and entry.split("/")[-1] == user_id)
|
||||
]
|
||||
device_ref.update({"user_list": new_list})
|
||||
|
||||
return {"status": "removed", "user_id": user_id}
|
||||
|
||||
@@ -52,10 +52,11 @@ def _generate_serial_number() -> str:
|
||||
def _ensure_unique_serial(db) -> str:
|
||||
"""Generate a serial number and verify it doesn't already exist in Firestore."""
|
||||
existing_sns = set()
|
||||
for doc in db.collection(COLLECTION).select(["device_id"]).stream():
|
||||
for doc in db.collection(COLLECTION).select(["serial_number"]).stream():
|
||||
data = doc.to_dict()
|
||||
if data.get("device_id"):
|
||||
existing_sns.add(data["device_id"])
|
||||
sn = data.get("serial_number") or data.get("device_id")
|
||||
if sn:
|
||||
existing_sns.add(sn)
|
||||
|
||||
for _ in range(100): # safety limit
|
||||
sn = _generate_serial_number()
|
||||
@@ -95,18 +96,40 @@ def _sanitize_dict(d: dict) -> dict:
|
||||
return result
|
||||
|
||||
|
||||
def _auto_upgrade_claimed(doc_ref, data: dict) -> dict:
|
||||
"""If the device has entries in user_list and isn't already claimed/decommissioned,
|
||||
upgrade mfg_status to 'claimed' automatically and return the updated data dict."""
|
||||
current_status = data.get("mfg_status", "")
|
||||
if current_status in ("claimed", "decommissioned"):
|
||||
return data
|
||||
user_list = data.get("user_list", []) or []
|
||||
if user_list:
|
||||
doc_ref.update({"mfg_status": "claimed"})
|
||||
data = dict(data)
|
||||
data["mfg_status"] = "claimed"
|
||||
return data
|
||||
|
||||
|
||||
def _doc_to_device(doc) -> DeviceInDB:
    """Convert a Firestore document snapshot to a DeviceInDB model.

    Also auto-upgrades mfg_status to 'claimed' if user_list is non-empty
    (via _auto_upgrade_claimed), so reads are self-healing for devices
    that gained users before their status was updated.
    """
    raw = doc.to_dict()
    raw = _auto_upgrade_claimed(doc.reference, raw)
    data = _sanitize_dict(raw)
    return DeviceInDB(id=doc.id, **data)
|
||||
|
||||
|
||||
FLEET_STATUSES = {"sold", "claimed"}
|
||||
|
||||
|
||||
def list_devices(
|
||||
search: str | None = None,
|
||||
online_only: bool | None = None,
|
||||
subscription_tier: str | None = None,
|
||||
) -> list[DeviceInDB]:
|
||||
"""List devices with optional filters."""
|
||||
"""List fleet devices (sold + claimed only) with optional filters."""
|
||||
db = get_db()
|
||||
ref = db.collection(COLLECTION)
|
||||
query = ref
|
||||
@@ -118,6 +141,14 @@ def list_devices(
|
||||
results = []
|
||||
|
||||
for doc in docs:
|
||||
raw = doc.to_dict() or {}
|
||||
|
||||
# Only include sold/claimed devices in the fleet view.
|
||||
# Legacy devices without mfg_status are included to avoid breaking old data.
|
||||
mfg_status = raw.get("mfg_status")
|
||||
if mfg_status and mfg_status not in FLEET_STATUSES:
|
||||
continue
|
||||
|
||||
device = _doc_to_device(doc)
|
||||
|
||||
# Client-side filters
|
||||
@@ -128,7 +159,7 @@ def list_devices(
|
||||
search_lower = search.lower()
|
||||
name_match = search_lower in (device.device_name or "").lower()
|
||||
location_match = search_lower in (device.device_location or "").lower()
|
||||
sn_match = search_lower in (device.device_id or "").lower()
|
||||
sn_match = search_lower in (device.serial_number or "").lower()
|
||||
if not (name_match or location_match or sn_match):
|
||||
continue
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ from shared.firebase import get_db
|
||||
from shared.exceptions import NotFoundError
|
||||
from equipment.models import NoteCreate, NoteUpdate, NoteInDB
|
||||
|
||||
COLLECTION = "equipment_notes"
|
||||
COLLECTION = "notes"
|
||||
|
||||
VALID_CATEGORIES = {"general", "maintenance", "installation", "issue", "action_item", "other"}
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ class UpdateType(str, Enum):
|
||||
|
||||
class FirmwareVersion(BaseModel):
|
||||
id: str
|
||||
hw_type: str # e.g. "vesper", "vesper_plus", "vesper_pro"
|
||||
hw_type: str # e.g. "vesper", "vesper_plus", "vesper_pro", "bespoke"
|
||||
channel: str # "stable", "beta", "alpha", "testing"
|
||||
version: str # semver e.g. "1.5"
|
||||
filename: str
|
||||
@@ -20,8 +20,10 @@ class FirmwareVersion(BaseModel):
|
||||
update_type: UpdateType = UpdateType.mandatory
|
||||
min_fw_version: Optional[str] = None # minimum fw version required to install this
|
||||
uploaded_at: str
|
||||
notes: Optional[str] = None
|
||||
changelog: Optional[str] = None
|
||||
release_note: Optional[str] = None
|
||||
is_latest: bool = False
|
||||
bespoke_uid: Optional[str] = None # only set when hw_type == "bespoke"
|
||||
|
||||
|
||||
class FirmwareListResponse(BaseModel):
|
||||
@@ -57,7 +59,7 @@ class FirmwareMetadataResponse(BaseModel):
|
||||
min_fw_version: Optional[str] = None
|
||||
download_url: str
|
||||
uploaded_at: str
|
||||
notes: Optional[str] = None
|
||||
release_note: Optional[str] = None
|
||||
|
||||
|
||||
# Keep backwards-compatible alias
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from fastapi import APIRouter, Depends, Query, UploadFile, File, Form
|
||||
from fastapi.responses import FileResponse
|
||||
from fastapi import APIRouter, Depends, Query, UploadFile, File, Form, HTTPException
|
||||
from fastapi.responses import FileResponse, PlainTextResponse
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
import logging
|
||||
@@ -22,7 +22,9 @@ async def upload_firmware(
|
||||
version: str = Form(...),
|
||||
update_type: UpdateType = Form(UpdateType.mandatory),
|
||||
min_fw_version: Optional[str] = Form(None),
|
||||
notes: Optional[str] = Form(None),
|
||||
changelog: Optional[str] = Form(None),
|
||||
release_note: Optional[str] = Form(None),
|
||||
bespoke_uid: Optional[str] = Form(None),
|
||||
file: UploadFile = File(...),
|
||||
_user: TokenPayload = Depends(require_permission("manufacturing", "add")),
|
||||
):
|
||||
@@ -34,7 +36,9 @@ async def upload_firmware(
|
||||
file_bytes=file_bytes,
|
||||
update_type=update_type,
|
||||
min_fw_version=min_fw_version,
|
||||
notes=notes,
|
||||
changelog=changelog,
|
||||
release_note=release_note,
|
||||
bespoke_uid=bespoke_uid,
|
||||
)
|
||||
|
||||
|
||||
@@ -61,6 +65,18 @@ def get_latest_firmware(
|
||||
return service.get_latest(hw_type, channel, hw_version=hw_version, current_version=current_version)
|
||||
|
||||
|
||||
@router.get("/{hw_type}/{channel}/latest/changelog", response_class=PlainTextResponse)
|
||||
def get_latest_changelog(hw_type: str, channel: str):
|
||||
"""Returns the full changelog for the latest firmware. Plain text."""
|
||||
return service.get_latest_changelog(hw_type, channel)
|
||||
|
||||
|
||||
@router.get("/{hw_type}/{channel}/{version}/info/changelog", response_class=PlainTextResponse)
|
||||
def get_version_changelog(hw_type: str, channel: str, version: str):
|
||||
"""Returns the full changelog for a specific firmware version. Plain text."""
|
||||
return service.get_version_changelog(hw_type, channel, version)
|
||||
|
||||
|
||||
@router.get("/{hw_type}/{channel}/{version}/info", response_model=FirmwareMetadataResponse)
|
||||
def get_firmware_info(hw_type: str, channel: str, version: str):
|
||||
"""Returns metadata for a specific firmware version.
|
||||
@@ -80,6 +96,33 @@ def download_firmware(hw_type: str, channel: str, version: str):
|
||||
)
|
||||
|
||||
|
||||
@router.put("/{firmware_id}", response_model=FirmwareVersion)
|
||||
async def edit_firmware(
|
||||
firmware_id: str,
|
||||
channel: Optional[str] = Form(None),
|
||||
version: Optional[str] = Form(None),
|
||||
update_type: Optional[UpdateType] = Form(None),
|
||||
min_fw_version: Optional[str] = Form(None),
|
||||
changelog: Optional[str] = Form(None),
|
||||
release_note: Optional[str] = Form(None),
|
||||
bespoke_uid: Optional[str] = Form(None),
|
||||
file: Optional[UploadFile] = File(None),
|
||||
_user: TokenPayload = Depends(require_permission("manufacturing", "add")),
|
||||
):
|
||||
file_bytes = await file.read() if file and file.filename else None
|
||||
return service.edit_firmware(
|
||||
doc_id=firmware_id,
|
||||
channel=channel,
|
||||
version=version,
|
||||
update_type=update_type,
|
||||
min_fw_version=min_fw_version,
|
||||
changelog=changelog,
|
||||
release_note=release_note,
|
||||
bespoke_uid=bespoke_uid,
|
||||
file_bytes=file_bytes,
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/{firmware_id}", status_code=204)
|
||||
def delete_firmware(
|
||||
firmware_id: str,
|
||||
|
||||
@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
COLLECTION = "firmware_versions"
|
||||
|
||||
VALID_HW_TYPES = {"vesper", "vesper_plus", "vesper_pro", "chronos", "chronos_pro", "agnus", "agnus_mini"}
|
||||
VALID_HW_TYPES = {"vesper", "vesper_plus", "vesper_pro", "chronos", "chronos_pro", "agnus", "agnus_mini", "bespoke"}
|
||||
VALID_CHANNELS = {"stable", "beta", "alpha", "testing"}
|
||||
|
||||
|
||||
@@ -43,8 +43,10 @@ def _doc_to_firmware_version(doc) -> FirmwareVersion:
|
||||
update_type=data.get("update_type", UpdateType.mandatory),
|
||||
min_fw_version=data.get("min_fw_version"),
|
||||
uploaded_at=uploaded_str,
|
||||
notes=data.get("notes"),
|
||||
changelog=data.get("changelog"),
|
||||
release_note=data.get("release_note"),
|
||||
is_latest=data.get("is_latest", False),
|
||||
bespoke_uid=data.get("bespoke_uid"),
|
||||
)
|
||||
|
||||
|
||||
@@ -65,7 +67,7 @@ def _fw_to_metadata_response(fw: FirmwareVersion) -> FirmwareMetadataResponse:
|
||||
min_fw_version=fw.min_fw_version,
|
||||
download_url=download_url,
|
||||
uploaded_at=fw.uploaded_at,
|
||||
notes=fw.notes,
|
||||
release_note=fw.release_note,
|
||||
)
|
||||
|
||||
|
||||
@@ -76,33 +78,59 @@ def upload_firmware(
|
||||
file_bytes: bytes,
|
||||
update_type: UpdateType = UpdateType.mandatory,
|
||||
min_fw_version: str | None = None,
|
||||
notes: str | None = None,
|
||||
changelog: str | None = None,
|
||||
release_note: str | None = None,
|
||||
bespoke_uid: str | None = None,
|
||||
) -> FirmwareVersion:
|
||||
if hw_type not in VALID_HW_TYPES:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid hw_type. Must be one of: {', '.join(sorted(VALID_HW_TYPES))}")
|
||||
if channel not in VALID_CHANNELS:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid channel. Must be one of: {', '.join(sorted(VALID_CHANNELS))}")
|
||||
if hw_type == "bespoke" and not bespoke_uid:
|
||||
raise HTTPException(status_code=400, detail="bespoke_uid is required when hw_type is 'bespoke'")
|
||||
|
||||
db = get_db()
|
||||
sha256 = hashlib.sha256(file_bytes).hexdigest()
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# For bespoke firmware: if a firmware with the same bespoke_uid already exists,
|
||||
# overwrite it (delete old doc + file, reuse same storage path keyed by uid).
|
||||
if hw_type == "bespoke" and bespoke_uid:
|
||||
existing_docs = list(
|
||||
db.collection(COLLECTION)
|
||||
.where("hw_type", "==", "bespoke")
|
||||
.where("bespoke_uid", "==", bespoke_uid)
|
||||
.stream()
|
||||
)
|
||||
for old_doc in existing_docs:
|
||||
old_data = old_doc.to_dict() or {}
|
||||
old_path = _storage_path("bespoke", old_data.get("channel", channel), old_data.get("version", version))
|
||||
if old_path.exists():
|
||||
old_path.unlink()
|
||||
try:
|
||||
old_path.parent.rmdir()
|
||||
except OSError:
|
||||
pass
|
||||
old_doc.reference.delete()
|
||||
|
||||
dest = _storage_path(hw_type, channel, version)
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
dest.write_bytes(file_bytes)
|
||||
|
||||
sha256 = hashlib.sha256(file_bytes).hexdigest()
|
||||
now = datetime.now(timezone.utc)
|
||||
doc_id = str(uuid.uuid4())
|
||||
|
||||
db = get_db()
|
||||
|
||||
# Mark previous latest for this hw_type+channel as no longer latest
|
||||
prev_docs = (
|
||||
db.collection(COLLECTION)
|
||||
.where("hw_type", "==", hw_type)
|
||||
.where("channel", "==", channel)
|
||||
.where("is_latest", "==", True)
|
||||
.stream()
|
||||
)
|
||||
for prev in prev_docs:
|
||||
prev.reference.update({"is_latest": False})
|
||||
# (skip for bespoke — each bespoke_uid is its own independent firmware)
|
||||
if hw_type != "bespoke":
|
||||
prev_docs = (
|
||||
db.collection(COLLECTION)
|
||||
.where("hw_type", "==", hw_type)
|
||||
.where("channel", "==", channel)
|
||||
.where("is_latest", "==", True)
|
||||
.stream()
|
||||
)
|
||||
for prev in prev_docs:
|
||||
prev.reference.update({"is_latest": False})
|
||||
|
||||
doc_ref = db.collection(COLLECTION).document(doc_id)
|
||||
doc_ref.set({
|
||||
@@ -115,8 +143,10 @@ def upload_firmware(
|
||||
"update_type": update_type.value,
|
||||
"min_fw_version": min_fw_version,
|
||||
"uploaded_at": now,
|
||||
"notes": notes,
|
||||
"changelog": changelog,
|
||||
"release_note": release_note,
|
||||
"is_latest": True,
|
||||
"bespoke_uid": bespoke_uid,
|
||||
})
|
||||
|
||||
return _doc_to_firmware_version(doc_ref.get())
|
||||
@@ -142,6 +172,8 @@ def list_firmware(
|
||||
def get_latest(hw_type: str, channel: str, hw_version: str | None = None, current_version: str | None = None) -> FirmwareMetadataResponse:
|
||||
if hw_type not in VALID_HW_TYPES:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
|
||||
if hw_type == "bespoke":
|
||||
raise HTTPException(status_code=400, detail="Bespoke firmware is not served via auto-update. Use the direct download URL.")
|
||||
if channel not in VALID_CHANNELS:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")
|
||||
|
||||
@@ -182,6 +214,52 @@ def get_version_info(hw_type: str, channel: str, version: str) -> FirmwareMetada
|
||||
return _fw_to_metadata_response(_doc_to_firmware_version(docs[0]))
|
||||
|
||||
|
||||
def get_latest_changelog(hw_type: str, channel: str) -> str:
    """Return the changelog text of the latest firmware on a channel.

    Raises HTTPException(400) for an unknown hw_type or channel, and
    NotFoundError when there is no latest firmware or it has no changelog.
    """
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")

    query = (
        get_db().collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("is_latest", "==", True)
        .limit(1)
    )
    matches = list(query.stream())
    if not matches:
        raise NotFoundError("Firmware")

    firmware = _doc_to_firmware_version(matches[0])
    if firmware.changelog:
        return firmware.changelog
    raise NotFoundError("Changelog")
|
||||
|
||||
|
||||
def get_version_changelog(hw_type: str, channel: str, version: str) -> str:
    """Return the changelog text for one specific firmware version.

    Raises HTTPException(400) for an unknown hw_type or channel, and
    NotFoundError when the version (or its changelog) does not exist.
    """
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")

    query = (
        get_db().collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("version", "==", version)
        .limit(1)
    )
    matches = list(query.stream())
    if not matches:
        raise NotFoundError("Firmware version")

    firmware = _doc_to_firmware_version(matches[0])
    if firmware.changelog:
        return firmware.changelog
    raise NotFoundError("Changelog")
|
||||
|
||||
|
||||
def get_firmware_path(hw_type: str, channel: str, version: str) -> Path:
|
||||
path = _storage_path(hw_type, channel, version)
|
||||
if not path.exists():
|
||||
@@ -205,6 +283,82 @@ def record_ota_event(event_type: str, payload: dict[str, Any]) -> None:
|
||||
logger.warning("Failed to persist OTA event (%s): %s", event_type, exc)
|
||||
|
||||
|
||||
def edit_firmware(
    doc_id: str,
    channel: str | None = None,
    version: str | None = None,
    update_type: UpdateType | None = None,
    min_fw_version: str | None = None,
    changelog: str | None = None,
    release_note: str | None = None,
    bespoke_uid: str | None = None,
    file_bytes: bytes | None = None,
) -> FirmwareVersion:
    """Partially update a firmware record, optionally replacing its binary.

    Only arguments passed as non-None are touched; an empty string for the
    free-text fields clears them (stored as None). When a new binary is
    supplied it is written to the (possibly relocated) storage path and
    the hash/size metadata refreshed; when only channel/version change,
    the existing binary is moved to the new path.

    Raises NotFoundError if no firmware doc exists for doc_id, and
    HTTPException(400) on an invalid channel.
    """
    db = get_db()
    doc_ref = db.collection(COLLECTION).document(doc_id)
    snapshot = doc_ref.get()
    if not snapshot.exists:
        raise NotFoundError("Firmware")

    current = snapshot.to_dict() or {}
    hw_type = current["hw_type"]
    prev_channel = current.get("channel", "")
    prev_version = current.get("version", "")

    # Where the binary should live after this edit.
    next_channel = prev_channel if channel is None else channel
    next_version = prev_version if version is None else version

    if channel is not None and channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel. Must be one of: {', '.join(sorted(VALID_CHANNELS))}")

    updates: dict = {}
    if channel is not None:
        updates["channel"] = channel
    if version is not None:
        updates["version"] = version
    if update_type is not None:
        updates["update_type"] = update_type.value
    # For the optional text fields an empty string means "clear".
    if min_fw_version is not None:
        updates["min_fw_version"] = min_fw_version or None
    if changelog is not None:
        updates["changelog"] = changelog or None
    if release_note is not None:
        updates["release_note"] = release_note or None
    if bespoke_uid is not None:
        updates["bespoke_uid"] = bespoke_uid or None

    channel_moved = channel is not None and channel != prev_channel
    version_moved = version is not None and version != prev_version

    if file_bytes is not None:
        # New binary supplied: drop the old file if its path changed,
        # then write the fresh bytes and refresh integrity metadata.
        source = _storage_path(hw_type, prev_channel, prev_version)
        target = _storage_path(hw_type, next_channel, next_version)
        if source != target and source.exists():
            source.unlink()
            try:
                source.parent.rmdir()  # prune directory if now empty
            except OSError:
                pass
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(file_bytes)
        updates["sha256"] = hashlib.sha256(file_bytes).hexdigest()
        updates["size_bytes"] = len(file_bytes)
    elif channel_moved or version_moved:
        # Path changed but no new file — relocate the existing binary.
        source = _storage_path(hw_type, prev_channel, prev_version)
        target = _storage_path(hw_type, next_channel, next_version)
        if source.exists() and source != target:
            target.parent.mkdir(parents=True, exist_ok=True)
            source.rename(target)
            try:
                source.parent.rmdir()  # prune directory if now empty
            except OSError:
                pass

    if updates:
        doc_ref.update(updates)

    return _doc_to_firmware_version(doc_ref.get())
|
||||
|
||||
|
||||
def delete_firmware(doc_id: str) -> None:
|
||||
db = get_db()
|
||||
doc_ref = db.collection(COLLECTION).document(doc_id)
|
||||
|
||||
@@ -24,6 +24,7 @@ from crm.comms_router import router as crm_comms_router
|
||||
from crm.media_router import router as crm_media_router
|
||||
from crm.nextcloud_router import router as crm_nextcloud_router
|
||||
from crm.quotations_router import router as crm_quotations_router
|
||||
from public.router import router as public_router
|
||||
from crm.nextcloud import close_client as close_nextcloud_client, keepalive_ping as nextcloud_keepalive
|
||||
from crm.mail_accounts import get_mail_accounts
|
||||
from mqtt.client import mqtt_manager
|
||||
@@ -67,6 +68,7 @@ app.include_router(crm_comms_router)
|
||||
app.include_router(crm_media_router)
|
||||
app.include_router(crm_nextcloud_router)
|
||||
app.include_router(crm_quotations_router)
|
||||
app.include_router(public_router)
|
||||
|
||||
|
||||
async def nextcloud_keepalive_loop():
|
||||
|
||||
@@ -55,6 +55,13 @@ class MfgStatus(str, Enum):
|
||||
decommissioned = "decommissioned"
|
||||
|
||||
|
||||
class LifecycleEntry(BaseModel):
    """A single recorded step in a device's manufacturing lifecycle history."""

    status_id: str
    date: str  # ISO 8601 UTC string
    note: Optional[str] = None  # free-form operator note
    set_by: Optional[str] = None  # email of the admin who recorded the step
|
||||
|
||||
class BatchCreate(BaseModel):
|
||||
board_type: BoardType
|
||||
board_version: str = Field(
|
||||
@@ -84,6 +91,9 @@ class DeviceInventoryItem(BaseModel):
|
||||
owner: Optional[str] = None
|
||||
assigned_to: Optional[str] = None
|
||||
device_name: Optional[str] = None
|
||||
lifecycle_history: Optional[List["LifecycleEntry"]] = None
|
||||
customer_id: Optional[str] = None
|
||||
user_list: Optional[List[str]] = None
|
||||
|
||||
|
||||
class DeviceInventoryListResponse(BaseModel):
|
||||
@@ -94,11 +104,19 @@ class DeviceInventoryListResponse(BaseModel):
|
||||
class DeviceStatusUpdate(BaseModel):
    """Request body for changing a device's mfg_status."""

    status: MfgStatus
    note: Optional[str] = None  # optional note stored with the transition
    # Set by the mfg UI to bypass the "claimed requires users in user_list"
    # guard right after a user has been added manually.
    force_claimed: bool = False
|
||||
|
||||
|
||||
class DeviceAssign(BaseModel):
    """Request body for assigning a device to a CRM customer.

    customer_id is the authoritative reference (the crm_customers doc ID);
    the legacy email/name fields are kept optional so older clients that
    still send them keep working while new clients may omit them.
    """

    customer_id: str
    customer_email: Optional[str] = None  # legacy, superseded by customer_id
    customer_name: Optional[str] = None  # legacy, superseded by customer_id
|
||||
|
||||
|
||||
class CustomerSearchResult(BaseModel):
    """Flattened CRM customer record returned by the customer search endpoint."""

    id: str
    name: str = ""
    email: str = ""
    organization: str = ""
    phone: str = ""
|
||||
|
||||
|
||||
class RecentActivityItem(BaseModel):
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
from fastapi import APIRouter, Depends, Query, HTTPException, UploadFile, File, Form
|
||||
from fastapi import APIRouter, Depends, Query, HTTPException, UploadFile, File
|
||||
from fastapi.responses import Response
|
||||
from fastapi.responses import RedirectResponse
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
@@ -14,9 +15,23 @@ from manufacturing.models import (
|
||||
from manufacturing import service
|
||||
from manufacturing import audit
|
||||
from shared.exceptions import NotFoundError
|
||||
from shared.firebase import get_db as get_firestore
|
||||
|
||||
|
||||
class LifecycleEntryPatch(BaseModel):
    """Body for editing an existing lifecycle_history entry in place."""

    index: int  # position of the entry within lifecycle_history
    date: Optional[str] = None  # new date; left untouched when None
    note: Optional[str] = None  # new note; left untouched when None
|
||||
|
||||
class LifecycleEntryCreate(BaseModel):
    """Body for appending a lifecycle_history entry on the fly."""

    status_id: str
    date: Optional[str] = None  # defaults to "now" (UTC) when omitted
    note: Optional[str] = None
|
||||
|
||||
VALID_FLASH_ASSETS = {"bootloader.bin", "partitions.bin"}
|
||||
VALID_HW_TYPES_MFG = {"vesper", "vesper_plus", "vesper_pro", "agnus", "agnus_mini", "chronos", "chronos_pro"}
|
||||
# Bespoke UIDs are dynamic — we allow any non-empty slug that doesn't clash with
|
||||
# a standard hw_type name. The flash-asset upload endpoint checks this below.
|
||||
|
||||
router = APIRouter(prefix="/api/manufacturing", tags=["manufacturing"])
|
||||
|
||||
@@ -83,13 +98,75 @@ def get_device(
|
||||
return service.get_device_by_sn(sn)
|
||||
|
||||
|
||||
@router.get("/customers/search")
|
||||
def search_customers(
|
||||
q: str = Query(""),
|
||||
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||
):
|
||||
"""Search CRM customers by name, email, phone, organization, or tags."""
|
||||
results = service.search_customers(q)
|
||||
return {"results": results}
|
||||
|
||||
|
||||
@router.get("/customers/{customer_id}")
|
||||
def get_customer(
|
||||
customer_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||
):
|
||||
"""Get a single CRM customer by ID."""
|
||||
db = get_firestore()
|
||||
doc = db.collection("crm_customers").document(customer_id).get()
|
||||
if not doc.exists:
|
||||
raise HTTPException(status_code=404, detail="Customer not found")
|
||||
data = doc.to_dict() or {}
|
||||
loc = data.get("location") or {}
|
||||
city = loc.get("city") if isinstance(loc, dict) else None
|
||||
return {
|
||||
"id": doc.id,
|
||||
"name": data.get("name") or "",
|
||||
"surname": data.get("surname") or "",
|
||||
"email": data.get("email") or "",
|
||||
"organization": data.get("organization") or "",
|
||||
"phone": data.get("phone") or "",
|
||||
"city": city or "",
|
||||
}
|
||||
|
||||
|
||||
@router.patch("/devices/{sn}/status", response_model=DeviceInventoryItem)
|
||||
async def update_status(
|
||||
sn: str,
|
||||
body: DeviceStatusUpdate,
|
||||
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||
):
|
||||
result = service.update_device_status(sn, body)
|
||||
# Guard: claimed requires at least one user in user_list
|
||||
# (allow if explicitly force_claimed=true, which the mfg UI sets after adding a user manually)
|
||||
if body.status.value == "claimed":
|
||||
db = get_firestore()
|
||||
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||
if docs:
|
||||
data = docs[0].to_dict() or {}
|
||||
user_list = data.get("user_list", []) or []
|
||||
if not user_list and not getattr(body, "force_claimed", False):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Cannot set status to 'claimed': device has no users in user_list. "
|
||||
"Assign a user first, then set to Claimed.",
|
||||
)
|
||||
|
||||
# Guard: sold requires a customer assigned
|
||||
if body.status.value == "sold":
|
||||
db = get_firestore()
|
||||
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||
if docs:
|
||||
data = docs[0].to_dict() or {}
|
||||
if not data.get("customer_id"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Cannot set status to 'sold' without an assigned customer. "
|
||||
"Use the 'Assign to Customer' action first.",
|
||||
)
|
||||
|
||||
result = service.update_device_status(sn, body, set_by=user.email)
|
||||
await audit.log_action(
|
||||
admin_user=user.email,
|
||||
action="status_updated",
|
||||
@@ -99,12 +176,91 @@ async def update_status(
|
||||
return result
|
||||
|
||||
|
||||
@router.patch("/devices/{sn}/lifecycle", response_model=DeviceInventoryItem)
|
||||
async def patch_lifecycle_entry(
|
||||
sn: str,
|
||||
body: LifecycleEntryPatch,
|
||||
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||
):
|
||||
"""Edit the date and/or note of a lifecycle history entry by index."""
|
||||
db = get_firestore()
|
||||
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||
if not docs:
|
||||
raise HTTPException(status_code=404, detail="Device not found")
|
||||
doc_ref = docs[0].reference
|
||||
data = docs[0].to_dict() or {}
|
||||
history = data.get("lifecycle_history") or []
|
||||
if body.index < 0 or body.index >= len(history):
|
||||
raise HTTPException(status_code=400, detail="Invalid lifecycle entry index")
|
||||
if body.date is not None:
|
||||
history[body.index]["date"] = body.date
|
||||
if body.note is not None:
|
||||
history[body.index]["note"] = body.note
|
||||
doc_ref.update({"lifecycle_history": history})
|
||||
from manufacturing.service import _doc_to_inventory_item
|
||||
return _doc_to_inventory_item(doc_ref.get())
|
||||
|
||||
|
||||
@router.post("/devices/{sn}/lifecycle", response_model=DeviceInventoryItem, status_code=201)
|
||||
async def create_lifecycle_entry(
|
||||
sn: str,
|
||||
body: LifecycleEntryCreate,
|
||||
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||
):
|
||||
"""Create a lifecycle history entry for a step that has no entry yet (on-the-fly)."""
|
||||
from datetime import datetime, timezone
|
||||
db = get_firestore()
|
||||
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||
if not docs:
|
||||
raise HTTPException(status_code=404, detail="Device not found")
|
||||
doc_ref = docs[0].reference
|
||||
data = docs[0].to_dict() or {}
|
||||
history = data.get("lifecycle_history") or []
|
||||
new_entry = {
|
||||
"status_id": body.status_id,
|
||||
"date": body.date or datetime.now(timezone.utc).isoformat(),
|
||||
"note": body.note,
|
||||
"set_by": user.email,
|
||||
}
|
||||
history.append(new_entry)
|
||||
doc_ref.update({"lifecycle_history": history})
|
||||
from manufacturing.service import _doc_to_inventory_item
|
||||
return _doc_to_inventory_item(doc_ref.get())
|
||||
|
||||
|
||||
@router.delete("/devices/{sn}/lifecycle/{index}", response_model=DeviceInventoryItem)
|
||||
async def delete_lifecycle_entry(
|
||||
sn: str,
|
||||
index: int,
|
||||
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||
):
|
||||
"""Delete a lifecycle history entry by index. Cannot delete the entry for the current status."""
|
||||
db = get_firestore()
|
||||
docs = list(db.collection("devices").where("serial_number", "==", sn).limit(1).stream())
|
||||
if not docs:
|
||||
raise HTTPException(status_code=404, detail="Device not found")
|
||||
doc_ref = docs[0].reference
|
||||
data = docs[0].to_dict() or {}
|
||||
history = data.get("lifecycle_history") or []
|
||||
if index < 0 or index >= len(history):
|
||||
raise HTTPException(status_code=400, detail="Invalid lifecycle entry index")
|
||||
current_status = data.get("mfg_status", "")
|
||||
if history[index].get("status_id") == current_status:
|
||||
raise HTTPException(status_code=400, detail="Cannot delete the entry for the current status. Change the status first.")
|
||||
history.pop(index)
|
||||
doc_ref.update({"lifecycle_history": history})
|
||||
from manufacturing.service import _doc_to_inventory_item
|
||||
return _doc_to_inventory_item(doc_ref.get())
|
||||
|
||||
|
||||
@router.get("/devices/{sn}/nvs.bin")
|
||||
async def download_nvs(
|
||||
sn: str,
|
||||
hw_type_override: Optional[str] = Query(None, description="Override hw_type written to NVS (for bespoke firmware)"),
|
||||
hw_revision_override: Optional[str] = Query(None, description="Override hw_revision written to NVS (for bespoke firmware)"),
|
||||
user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||
):
|
||||
binary = service.get_nvs_binary(sn)
|
||||
binary = service.get_nvs_binary(sn, hw_type_override=hw_type_override, hw_revision_override=hw_revision_override)
|
||||
await audit.log_action(
|
||||
admin_user=user.email,
|
||||
action="device_flashed",
|
||||
@@ -123,12 +279,15 @@ async def assign_device(
|
||||
body: DeviceAssign,
|
||||
user: TokenPayload = Depends(require_permission("manufacturing", "edit")),
|
||||
):
|
||||
result = service.assign_device(sn, body)
|
||||
try:
|
||||
result = service.assign_device(sn, body)
|
||||
except NotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
await audit.log_action(
|
||||
admin_user=user.email,
|
||||
action="device_assigned",
|
||||
serial_number=sn,
|
||||
detail={"customer_email": body.customer_email, "customer_name": body.customer_name},
|
||||
detail={"customer_id": body.customer_id},
|
||||
)
|
||||
return result
|
||||
|
||||
@@ -201,8 +360,9 @@ async def upload_flash_asset(
|
||||
and .pio/build/{env}/partitions.bin). Upload them once per hw_type after
|
||||
each PlatformIO build that changes the partition layout.
|
||||
"""
|
||||
if hw_type not in VALID_HW_TYPES_MFG:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid hw_type. Must be one of: {', '.join(sorted(VALID_HW_TYPES_MFG))}")
|
||||
# hw_type can be a standard board type OR a bespoke UID (any non-empty slug)
|
||||
if not hw_type or len(hw_type) > 128:
|
||||
raise HTTPException(status_code=400, detail="Invalid hw_type/bespoke UID.")
|
||||
if asset not in VALID_FLASH_ASSETS:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid asset. Must be one of: {', '.join(sorted(VALID_FLASH_ASSETS))}")
|
||||
data = await file.read()
|
||||
@@ -212,34 +372,38 @@ async def upload_flash_asset(
|
||||
@router.get("/devices/{sn}/bootloader.bin")
|
||||
def download_bootloader(
|
||||
sn: str,
|
||||
hw_type_override: Optional[str] = Query(None, description="Override hw_type for flash asset lookup (for bespoke firmware)"),
|
||||
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||
):
|
||||
"""Return the bootloader.bin for this device's hw_type (flashed at 0x1000)."""
|
||||
item = service.get_device_by_sn(sn)
|
||||
hw_type = hw_type_override or item.hw_type
|
||||
try:
|
||||
data = service.get_flash_asset(item.hw_type, "bootloader.bin")
|
||||
data = service.get_flash_asset(hw_type, "bootloader.bin")
|
||||
except NotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
return Response(
|
||||
content=data,
|
||||
media_type="application/octet-stream",
|
||||
headers={"Content-Disposition": f'attachment; filename="bootloader_{item.hw_type}.bin"'},
|
||||
headers={"Content-Disposition": f'attachment; filename="bootloader_{hw_type}.bin"'},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/devices/{sn}/partitions.bin")
|
||||
def download_partitions(
|
||||
sn: str,
|
||||
hw_type_override: Optional[str] = Query(None, description="Override hw_type for flash asset lookup (for bespoke firmware)"),
|
||||
_user: TokenPayload = Depends(require_permission("manufacturing", "view")),
|
||||
):
|
||||
"""Return the partitions.bin for this device's hw_type (flashed at 0x8000)."""
|
||||
item = service.get_device_by_sn(sn)
|
||||
hw_type = hw_type_override or item.hw_type
|
||||
try:
|
||||
data = service.get_flash_asset(item.hw_type, "partitions.bin")
|
||||
data = service.get_flash_asset(hw_type, "partitions.bin")
|
||||
except NotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
return Response(
|
||||
content=data,
|
||||
media_type="application/octet-stream",
|
||||
headers={"Content-Disposition": f'attachment; filename="partitions_{item.hw_type}.bin"'},
|
||||
headers={"Content-Disposition": f'attachment; filename="partitions_{hw_type}.bin"'},
|
||||
)
|
||||
|
||||
@@ -33,6 +33,18 @@ def _get_existing_sns(db) -> set:
|
||||
return existing
|
||||
|
||||
|
||||
def _resolve_user_list(raw_list: list) -> list[str]:
    """Convert user_list entries (DocumentReferences or path strings) to plain user ID strings."""
    from google.cloud.firestore_v1 import DocumentReference

    ids: list[str] = []
    for entry in raw_list:
        if isinstance(entry, DocumentReference):
            ids.append(entry.id)
        elif isinstance(entry, str):
            # Path strings look like "users/<id>"; keep only the final segment.
            ids.append(entry.rsplit("/", 1)[-1])
    return ids
|
||||
|
||||
|
||||
def _doc_to_inventory_item(doc) -> DeviceInventoryItem:
|
||||
data = doc.to_dict() or {}
|
||||
created_raw = data.get("created_at")
|
||||
@@ -52,6 +64,9 @@ def _doc_to_inventory_item(doc) -> DeviceInventoryItem:
|
||||
owner=data.get("owner"),
|
||||
assigned_to=data.get("assigned_to"),
|
||||
device_name=data.get("device_name") or None,
|
||||
lifecycle_history=data.get("lifecycle_history") or [],
|
||||
customer_id=data.get("customer_id"),
|
||||
user_list=_resolve_user_list(data.get("user_list") or []),
|
||||
)
|
||||
|
||||
|
||||
@@ -80,11 +95,19 @@ def create_batch(data: BatchCreate) -> BatchResponse:
|
||||
"created_at": now,
|
||||
"owner": None,
|
||||
"assigned_to": None,
|
||||
"users_list": [],
|
||||
"user_list": [],
|
||||
# Legacy fields left empty so existing device views don't break
|
||||
"device_name": "",
|
||||
"device_location": "",
|
||||
"is_Online": False,
|
||||
"lifecycle_history": [
|
||||
{
|
||||
"status_id": "manufactured",
|
||||
"date": now.isoformat(),
|
||||
"note": None,
|
||||
"set_by": None,
|
||||
}
|
||||
],
|
||||
})
|
||||
serial_numbers.append(sn)
|
||||
|
||||
@@ -135,14 +158,31 @@ def get_device_by_sn(sn: str) -> DeviceInventoryItem:
|
||||
return _doc_to_inventory_item(docs[0])
|
||||
|
||||
|
||||
def update_device_status(sn: str, data: DeviceStatusUpdate, set_by: str | None = None) -> DeviceInventoryItem:
    """Update a device's manufacturing status and append a lifecycle-history entry.

    Args:
        sn: device serial number.
        data: new status plus an optional free-text note.
        set_by: identifier of the actor making the change (stored in history;
            defaults to None for backward compatibility with older callers).

    Raises:
        NotFoundError: if no device with this serial number exists.
    """
    db = get_db()
    docs = list(db.collection(COLLECTION).where("serial_number", "==", sn).limit(1).stream())
    if not docs:
        raise NotFoundError("Device")

    doc_ref = docs[0].reference
    doc_data = docs[0].to_dict() or {}
    now = datetime.now(timezone.utc).isoformat()

    history = doc_data.get("lifecycle_history") or []

    # Append new lifecycle entry
    new_entry = {
        "status_id": data.status.value,
        "date": now,
        "note": data.note if data.note else None,
        "set_by": set_by,
    }
    history.append(new_entry)

    update = {
        "mfg_status": data.status.value,
        "lifecycle_history": history,
    }
    if data.note:
        update["mfg_status_note"] = data.note
    doc_ref.update(update)
    return _doc_to_inventory_item(doc_ref.get())
|
||||
|
||||
|
||||
def get_nvs_binary(sn: str, hw_type_override: str | None = None, hw_revision_override: str | None = None) -> bytes:
    """Build the NVS partition image for a device.

    Args:
        sn: device serial number (looked up in inventory).
        hw_type_override: optional hardware family to use instead of the
            device's stored hw_type (bespoke firmware).
        hw_revision_override: optional hardware revision override.

    Returns:
        Raw NVS partition bytes ready to flash.
    """
    item = get_device_by_sn(sn)
    return generate_nvs_binary(
        serial_number=item.serial_number,
        # Overrides take precedence over the values stored on the device doc.
        hw_family=hw_type_override if hw_type_override else item.hw_type,
        hw_revision=hw_revision_override if hw_revision_override else item.hw_version,
    )
|
||||
|
||||
|
||||
def assign_device(sn: str, data: DeviceAssign) -> DeviceInventoryItem:
    """Assign a device to a customer by customer_id.

    - Stores customer_id on the device doc.
    - Adds the device to the customer's owned_items list.
    - Sets mfg_status to 'sold' unless device is already 'claimed'.

    Raises:
        NotFoundError: if the device or the customer does not exist.
    """
    from utils.email import send_device_assignment_invite

    db = get_db()
    CRM_COLLECTION = "crm_customers"

    # Get device doc
    docs = list(db.collection(COLLECTION).where("serial_number", "==", sn).limit(1).stream())
    if not docs:
        raise NotFoundError("Device")

    doc_data = docs[0].to_dict() or {}
    doc_ref = docs[0].reference
    current_status = doc_data.get("mfg_status", "manufactured")

    # Get customer doc
    customer_ref = db.collection(CRM_COLLECTION).document(data.customer_id)
    customer_doc = customer_ref.get()
    if not customer_doc.exists:
        raise NotFoundError("Customer")
    customer_data = customer_doc.to_dict() or {}

    # Determine new status: don't downgrade claimed → sold
    new_status = current_status if current_status == "claimed" else "sold"

    now = datetime.now(timezone.utc).isoformat()
    history = doc_data.get("lifecycle_history") or []
    history.append({
        "status_id": new_status,
        "date": now,
        "note": "Assigned to customer",
        "set_by": None,
    })

    hw_type = doc_data.get("hw_type", "")
    device_name = BOARD_TYPE_LABELS.get(hw_type, hw_type or "Device")
    doc_ref.update({
        "customer_id": data.customer_id,
        "mfg_status": new_status,
        "lifecycle_history": history,
    })

    # Best-effort notification: the assignment itself must not fail if email does.
    try:
        send_device_assignment_invite(
            customer_email=data.customer_email,
            serial_number=sn,
            device_name=device_name,
            customer_name=data.customer_name,
        )
    except Exception as exc:
        logger.error("Assignment succeeded but email failed for %s → %s: %s", sn, data.customer_email, exc)

    # Add to customer's owned_items (avoid duplicates)
    owned_items = customer_data.get("owned_items", []) or []
    device_doc_id = docs[0].id
    already_assigned = any(
        item.get("type") == "console_device"
        and item.get("console_device", {}).get("device_id") == device_doc_id
        for item in owned_items
    )
    if not already_assigned:
        device_name = doc_data.get("device_name") or BOARD_TYPE_LABELS.get(doc_data.get("hw_type", ""), sn)
        owned_items.append({
            "type": "console_device",
            "console_device": {
                "device_id": device_doc_id,
                "serial_number": sn,
                "label": device_name,
            },
        })
        customer_ref.update({"owned_items": owned_items})

    return _doc_to_inventory_item(doc_ref.get())
|
||||
|
||||
|
||||
def search_customers(q: str) -> list:
    """Search crm_customers by name, email, phone, organization, or tags."""
    db = get_db()
    CRM_COLLECTION = "crm_customers"
    needle = q.lower().strip()
    matches = []
    for doc in db.collection(CRM_COLLECTION).stream():
        data = doc.to_dict() or {}
        loc = data.get("location") or {}
        if not isinstance(loc, dict):
            loc = {}
        # Build one lowercase haystack out of every searchable field.
        parts = [
            data.get("name"), data.get("surname"),
            data.get("email"), data.get("phone"), data.get("organization"),
            loc.get("address"), loc.get("city"), loc.get("postal_code"),
            loc.get("region"), loc.get("country"),
            " ".join(data.get("tags") or []),
        ]
        haystack = " ".join(p for p in parts if p).lower()
        # Empty query matches everything.
        if not needle or needle in haystack:
            matches.append({
                "id": doc.id,
                "name": data.get("name") or "",
                "surname": data.get("surname") or "",
                "email": data.get("email") or "",
                "organization": data.get("organization") or "",
                "phone": data.get("phone") or "",
                "city": loc.get("city") or "",
            })
    return matches
|
||||
|
||||
|
||||
def get_stats() -> ManufacturingStats:
|
||||
db = get_db()
|
||||
docs = list(db.collection(COLLECTION).stream())
|
||||
|
||||
@@ -30,6 +30,7 @@ class MelodyInfo(BaseModel):
|
||||
isTrueRing: bool = False
|
||||
previewURL: str = ""
|
||||
archetype_csv: Optional[str] = None
|
||||
outdated_archetype: bool = False
|
||||
|
||||
|
||||
class MelodyAttributes(BaseModel):
|
||||
|
||||
@@ -146,6 +146,23 @@ async def get_files(
|
||||
return service.get_storage_files(melody_id, melody.uid)
|
||||
|
||||
|
||||
@router.patch("/{melody_id}/set-outdated", response_model=MelodyInDB)
|
||||
async def set_outdated(
|
||||
melody_id: str,
|
||||
outdated: bool = Query(...),
|
||||
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
||||
):
|
||||
"""Manually set or clear the outdated_archetype flag on a melody."""
|
||||
melody = await service.get_melody(melody_id)
|
||||
info = melody.information.model_dump()
|
||||
info["outdated_archetype"] = outdated
|
||||
return await service.update_melody(
|
||||
melody_id,
|
||||
MelodyUpdate(information=MelodyInfo(**info)),
|
||||
actor_name=_user.name,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{melody_id}/download/binary")
|
||||
async def download_binary_file(
|
||||
melody_id: str,
|
||||
|
||||
0
backend/public/__init__.py
Normal file
0
backend/public/__init__.py
Normal file
208
backend/public/router.py
Normal file
208
backend/public/router.py
Normal file
@@ -0,0 +1,208 @@
|
||||
"""
|
||||
Public (no-auth) endpoints for CloudFlash and feature gate checks.
|
||||
"""
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from fastapi.responses import Response
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Optional
|
||||
|
||||
from settings.public_features_service import get_public_features
|
||||
from firmware.service import list_firmware
|
||||
from utils.nvs_generator import generate as generate_nvs
|
||||
from manufacturing.service import get_device_by_sn
|
||||
from shared.exceptions import NotFoundError
|
||||
|
||||
router = APIRouter(prefix="/api/public", tags=["public"])
|
||||
|
||||
|
||||
# ── Feature gate ──────────────────────────────────────────────────────────────
|
||||
|
||||
class CloudFlashStatus(BaseModel):
    """Response payload for the CloudFlash feature-gate check."""

    # True when the public CloudFlash page is enabled by admins.
    enabled: bool
|
||||
|
||||
|
||||
@router.get("/cloudflash/status", response_model=CloudFlashStatus)
|
||||
async def cloudflash_status():
|
||||
"""Returns whether the CloudFlash public page is currently enabled."""
|
||||
settings = get_public_features()
|
||||
return CloudFlashStatus(enabled=settings.cloudflash_enabled)
|
||||
|
||||
|
||||
def _require_cloudflash_enabled():
    """Raises 403 if CloudFlash is disabled."""
    features = get_public_features()
    if features.cloudflash_enabled:
        return
    raise HTTPException(status_code=403, detail="CloudFlash is currently disabled.")
|
||||
|
||||
|
||||
# ── Public firmware list ───────────────────────────────────────────────────────
|
||||
|
||||
class PublicFirmwareOption(BaseModel):
    """One flashable firmware choice shown on the public CloudFlash page."""

    hw_type: str        # board family slug, e.g. "vesper_plus"
    hw_type_label: str  # human-readable label for the UI
    channel: str
    version: str
    download_url: str   # relative API URL of firmware.bin
|
||||
|
||||
|
||||
# Display labels for the known board families; anything missing from this
# table falls back to a title-cased version of the slug at the call sites.
HW_TYPE_LABELS = {
    "vesper": "Vesper",
    "vesper_plus": "Vesper Plus",
    "vesper_pro": "Vesper Pro",
    "agnus": "Agnus",
    "agnus_mini": "Agnus Mini",
    "chronos": "Chronos",
    "chronos_pro": "Chronos Pro",
}
|
||||
|
||||
|
||||
@router.get("/cloudflash/firmware", response_model=List[PublicFirmwareOption])
|
||||
async def list_public_firmware():
|
||||
"""
|
||||
Returns all available firmware options (is_latest=True, non-bespoke, stable channel only).
|
||||
No authentication required — used by the public CloudFlash page.
|
||||
"""
|
||||
_require_cloudflash_enabled()
|
||||
|
||||
all_fw = list_firmware()
|
||||
options = []
|
||||
for fw in all_fw:
|
||||
if not fw.is_latest:
|
||||
continue
|
||||
if fw.hw_type == "bespoke":
|
||||
continue
|
||||
if fw.channel != "stable":
|
||||
continue
|
||||
options.append(PublicFirmwareOption(
|
||||
hw_type=fw.hw_type,
|
||||
hw_type_label=HW_TYPE_LABELS.get(fw.hw_type, fw.hw_type.replace("_", " ").title()),
|
||||
channel=fw.channel,
|
||||
version=fw.version,
|
||||
download_url=f"/api/firmware/{fw.hw_type}/{fw.channel}/{fw.version}/firmware.bin",
|
||||
))
|
||||
|
||||
# Sort by hw_type label
|
||||
options.sort(key=lambda x: x.hw_type_label)
|
||||
return options
|
||||
|
||||
|
||||
# ── Public serial number validation ──────────────────────────────────────────
|
||||
|
||||
class SerialValidationResult(BaseModel):
    """Outcome of a public serial-number lookup; hw fields only set when valid."""

    valid: bool
    hw_type: Optional[str] = None
    hw_type_label: Optional[str] = None
    hw_version: Optional[str] = None
|
||||
|
||||
|
||||
@router.get("/cloudflash/validate-serial/{serial_number}", response_model=SerialValidationResult)
|
||||
async def validate_serial(serial_number: str):
|
||||
"""
|
||||
Check whether a serial number exists in the device database.
|
||||
Returns hw_type info if found so the frontend can confirm it matches the user's selection.
|
||||
No sensitive device data is returned.
|
||||
"""
|
||||
_require_cloudflash_enabled()
|
||||
|
||||
sn = serial_number.strip().upper()
|
||||
try:
|
||||
device = get_device_by_sn(sn)
|
||||
return SerialValidationResult(
|
||||
valid=True,
|
||||
hw_type=device.hw_type,
|
||||
hw_type_label=HW_TYPE_LABELS.get(device.hw_type, device.hw_type.replace("_", " ").title()),
|
||||
hw_version=device.hw_version,
|
||||
)
|
||||
except Exception:
|
||||
return SerialValidationResult(valid=False)
|
||||
|
||||
|
||||
# ── Public NVS generation ─────────────────────────────────────────────────────
|
||||
|
||||
class NvsRequest(BaseModel):
    """Body of the public NVS-generation request (user-supplied hardware info)."""

    serial_number: str  # read by the user from the sticker on the device
    hw_type: str
    hw_revision: str
|
||||
|
||||
|
||||
@router.post("/cloudflash/nvs.bin")
|
||||
async def generate_public_nvs(body: NvsRequest):
|
||||
"""
|
||||
Generate an NVS binary for a given serial number + hardware info.
|
||||
No authentication required — used by the public CloudFlash page for Full Wipe flash.
|
||||
The serial number is provided by the user (they read it from the sticker on their device).
|
||||
"""
|
||||
_require_cloudflash_enabled()
|
||||
|
||||
sn = body.serial_number.strip().upper()
|
||||
if not sn:
|
||||
raise HTTPException(status_code=422, detail="Serial number is required.")
|
||||
|
||||
hw_type = body.hw_type.strip().lower()
|
||||
hw_revision = body.hw_revision.strip()
|
||||
|
||||
if not hw_type or not hw_revision:
|
||||
raise HTTPException(status_code=422, detail="hw_type and hw_revision are required.")
|
||||
|
||||
try:
|
||||
nvs_bytes = generate_nvs(
|
||||
serial_number=sn,
|
||||
hw_family=hw_type,
|
||||
hw_revision=hw_revision,
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"NVS generation failed: {str(e)}")
|
||||
|
||||
return Response(
|
||||
content=nvs_bytes,
|
||||
media_type="application/octet-stream",
|
||||
headers={"Content-Disposition": f'attachment; filename="{sn}_nvs.bin"'},
|
||||
)
|
||||
|
||||
|
||||
# ── Public flash assets (bootloader + partitions) ─────────────────────────────
|
||||
|
||||
@router.get("/cloudflash/{hw_type}/bootloader.bin")
|
||||
async def get_public_bootloader(hw_type: str):
|
||||
"""
|
||||
Serve the bootloader binary for a given hw_type.
|
||||
No authentication required — used by the public CloudFlash page.
|
||||
"""
|
||||
_require_cloudflash_enabled()
|
||||
|
||||
import os
|
||||
from config import settings as cfg
|
||||
from pathlib import Path
|
||||
|
||||
asset_path = Path(cfg.flash_assets_storage_path) / hw_type / "bootloader.bin"
|
||||
if not asset_path.exists():
|
||||
raise HTTPException(status_code=404, detail=f"Bootloader not found for {hw_type}.")
|
||||
|
||||
return Response(
|
||||
content=asset_path.read_bytes(),
|
||||
media_type="application/octet-stream",
|
||||
headers={"Content-Disposition": f'attachment; filename="bootloader_{hw_type}.bin"'},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/cloudflash/{hw_type}/partitions.bin")
|
||||
async def get_public_partitions(hw_type: str):
|
||||
"""
|
||||
Serve the partition table binary for a given hw_type.
|
||||
No authentication required — used by the public CloudFlash page.
|
||||
"""
|
||||
_require_cloudflash_enabled()
|
||||
|
||||
import os
|
||||
from config import settings as cfg
|
||||
from pathlib import Path
|
||||
|
||||
asset_path = Path(cfg.flash_assets_storage_path) / hw_type / "partitions.bin"
|
||||
if not asset_path.exists():
|
||||
raise HTTPException(status_code=404, detail=f"Partition table not found for {hw_type}.")
|
||||
|
||||
return Response(
|
||||
content=asset_path.read_bytes(),
|
||||
media_type="application/octet-stream",
|
||||
headers={"Content-Disposition": f'attachment; filename="partitions_{hw_type}.bin"'},
|
||||
)
|
||||
10
backend/settings/public_features_models.py
Normal file
10
backend/settings/public_features_models.py
Normal file
@@ -0,0 +1,10 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class PublicFeaturesSettings(BaseModel):
    """Admin-controlled flags gating the public (no-auth) pages."""

    # CloudFlash public flash page; off by default.
    cloudflash_enabled: bool = False


class PublicFeaturesSettingsUpdate(BaseModel):
    """Partial update payload; None fields are left untouched."""

    cloudflash_enabled: Optional[bool] = None
|
||||
31
backend/settings/public_features_service.py
Normal file
31
backend/settings/public_features_service.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from shared.firebase import get_db
|
||||
from settings.public_features_models import PublicFeaturesSettings, PublicFeaturesSettingsUpdate
|
||||
|
||||
COLLECTION = "admin_settings"
|
||||
DOC_ID = "public_features"
|
||||
|
||||
|
||||
def get_public_features() -> PublicFeaturesSettings:
    """Get public features settings from Firestore. Creates defaults if not found."""
    doc_ref = get_db().collection(COLLECTION).document(DOC_ID)
    snapshot = doc_ref.get()
    if snapshot.exists:
        return PublicFeaturesSettings(**snapshot.to_dict())
    # First access: persist the defaults so the document always exists.
    defaults = PublicFeaturesSettings()
    doc_ref.set(defaults.model_dump())
    return defaults
|
||||
|
||||
|
||||
def update_public_features(data: PublicFeaturesSettingsUpdate) -> PublicFeaturesSettings:
    """Update public features settings. Only provided fields are updated."""
    doc_ref = get_db().collection(COLLECTION).document(DOC_ID)
    snapshot = doc_ref.get()

    # Merge the provided fields over whatever is stored (or the defaults).
    merged = snapshot.to_dict() if snapshot.exists else PublicFeaturesSettings().model_dump()
    merged.update(data.model_dump(exclude_none=True))

    # Round-trip through the model to validate and normalize before writing.
    normalized = PublicFeaturesSettings(**merged)
    doc_ref.set(normalized.model_dump())
    return normalized
|
||||
@@ -1,8 +1,11 @@
|
||||
from fastapi import APIRouter, Depends
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from auth.dependencies import require_permission, require_roles
|
||||
from auth.models import Role
|
||||
from settings.models import MelodySettings, MelodySettingsUpdate
|
||||
from settings.public_features_models import PublicFeaturesSettings, PublicFeaturesSettingsUpdate
|
||||
from settings import service
|
||||
from settings import public_features_service
|
||||
|
||||
router = APIRouter(prefix="/api/settings", tags=["settings"])
|
||||
|
||||
@@ -20,3 +23,20 @@ async def update_melody_settings(
|
||||
_user: TokenPayload = Depends(require_permission("melodies", "edit")),
|
||||
):
|
||||
return service.update_melody_settings(body)
|
||||
|
||||
|
||||
# ── Public Features Settings (sysadmin / admin only) ─────────────────────────
|
||||
|
||||
@router.get("/public-features", response_model=PublicFeaturesSettings)
|
||||
async def get_public_features(
|
||||
_user: TokenPayload = Depends(require_roles(Role.sysadmin, Role.admin)),
|
||||
):
|
||||
return public_features_service.get_public_features()
|
||||
|
||||
|
||||
@router.put("/public-features", response_model=PublicFeaturesSettings)
|
||||
async def update_public_features(
|
||||
body: PublicFeaturesSettingsUpdate,
|
||||
_user: TokenPayload = Depends(require_roles(Role.sysadmin, Role.admin)),
|
||||
):
|
||||
return public_features_service.update_public_features(body)
|
||||
|
||||
@@ -177,16 +177,16 @@ def _build_page(entries: List[bytes], slot_counts: List[int], seq: int = 0) -> b
|
||||
return page
|
||||
|
||||
|
||||
def generate(serial_number: str, hw_type: str, hw_version: str) -> bytes:
|
||||
def generate(serial_number: str, hw_family: str, hw_revision: str) -> bytes:
|
||||
"""Generate a 0x5000-byte NVS partition binary for a Vesper device.
|
||||
|
||||
serial_number: full SN string e.g. 'BSVSPR-26C13X-STD01R-X7KQA'
|
||||
hw_type: board family e.g. 'vesper', 'vesper_plus', 'vesper_pro'
|
||||
hw_version: zero-padded revision e.g. '01'
|
||||
hw_family: board family e.g. 'vesper-standard', 'vesper-plus'
|
||||
hw_revision: hardware revision string e.g. '1.0'
|
||||
|
||||
Writes the NEW schema keys (2.0+) expected by ConfigManager:
|
||||
Writes the schema keys expected by ConfigManager (struct DeviceConfig):
|
||||
serial ← full serial number
|
||||
hw_family ← board family (hw_type value, lowercase)
|
||||
hw_family ← board family (lowercase)
|
||||
hw_revision ← hardware revision string
|
||||
|
||||
Returns raw bytes ready to flash at 0x9000.
|
||||
@@ -196,8 +196,8 @@ def generate(serial_number: str, hw_type: str, hw_version: str) -> bytes:
|
||||
# Build entries for namespace "device_id"
|
||||
ns_entry, ns_span = _build_namespace_entry("device_id", ns_index)
|
||||
uid_entry, uid_span = _build_string_entry(ns_index, "serial", serial_number)
|
||||
hwt_entry, hwt_span = _build_string_entry(ns_index, "hw_family", hw_type.lower())
|
||||
hwv_entry, hwv_span = _build_string_entry(ns_index, "hw_revision", hw_version)
|
||||
hwt_entry, hwt_span = _build_string_entry(ns_index, "hw_family", hw_family.lower())
|
||||
hwv_entry, hwv_span = _build_string_entry(ns_index, "hw_revision", hw_revision)
|
||||
|
||||
entries = [ns_entry, uid_entry, hwt_entry, hwv_entry]
|
||||
spans = [ns_span, uid_span, hwt_span, hwv_span]
|
||||
|
||||
Reference in New Issue
Block a user