update: Major Overhaul to all subsystems

This commit is contained in:
2026-03-07 11:32:18 +02:00
parent 810e81b323
commit b280d62ee5
107 changed files with 20414 additions and 929 deletions

View File

@@ -1,5 +1,15 @@
FROM python:3.11-slim
# WeasyPrint system dependencies (libpango, libcairo, etc.)
RUN apt-get update && apt-get install -y --no-install-recommends \
libpango-1.0-0 \
libpangocairo-1.0-0 \
libgdk-pixbuf-2.0-0 \
libffi-dev \
shared-mime-info \
fonts-dejavu-core \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY requirements.txt .

View File

@@ -10,45 +10,141 @@ class Role(str, Enum):
user = "user"
class SectionPermissions(BaseModel):
    """Legacy generic per-section permissions.

    Superseded by the section-specific models below; kept only so existing
    references still resolve.  Field set reconstructed from the old generic
    view/add/edit/delete scheme — TODO confirm against pre-refactor schema.
    """
    view: bool = False
    add: bool = False
    edit: bool = False
    delete: bool = False


class MelodiesPermissions(BaseModel):
    """Permission flags for the Melodies section."""
    view: bool = False
    add: bool = False
    delete: bool = False
    safe_edit: bool = False
    full_edit: bool = False
    archetype_access: bool = False
    settings_access: bool = False
    compose_access: bool = False


class DevicesPermissions(BaseModel):
    """Permission flags for the Devices section."""
    view: bool = False
    add: bool = False
    delete: bool = False
    safe_edit: bool = False
    edit_bells: bool = False
    edit_clock: bool = False
    edit_warranty: bool = False
    full_edit: bool = False
    control: bool = False


class AppUsersPermissions(BaseModel):
    """Permission flags for the App Users section."""
    view: bool = False
    add: bool = False
    delete: bool = False
    safe_edit: bool = False
    full_edit: bool = False


class IssuesNotesPermissions(BaseModel):
    """Permission flags for Issues & Notes."""
    view: bool = False
    add: bool = False
    delete: bool = False
    edit: bool = False


class MailPermissions(BaseModel):
    """Permission flags for the Mail page."""
    view: bool = False
    compose: bool = False
    reply: bool = False
class CrmPermissions(BaseModel):
    """General CRM-wide permission flags."""
    activity_log: bool = False


class CrmCustomersPermissions(BaseModel):
    """Fine-grained permission flags for CRM customer records."""
    full_access: bool = False
    overview: bool = False
    orders_view: bool = False
    orders_edit: bool = False
    quotations_view: bool = False
    quotations_edit: bool = False
    comms_view: bool = False
    comms_log: bool = False
    comms_edit: bool = False
    comms_compose: bool = False
    add: bool = False
    delete: bool = False
    files_view: bool = False
    files_edit: bool = False
    devices_view: bool = False
    devices_edit: bool = False


class CrmProductsPermissions(BaseModel):
    """Permission flags for the CRM product catalogue."""
    view: bool = False
    add: bool = False
    edit: bool = False
    delete: bool = False


class MfgPermissions(BaseModel):
    """Permission flags for the manufacturing subsystem."""
    view_inventory: bool = False
    edit: bool = False
    provision: bool = False
    firmware_view: bool = False
    firmware_edit: bool = False


class ApiReferencePermissions(BaseModel):
    """Access flag for the API reference pages."""
    access: bool = False


class MqttPermissions(BaseModel):
    """Access flag for the MQTT subsystem."""
    access: bool = False
class StaffPermissions(BaseModel):
    """Top-level permission container stored on a staff user.

    Fix: the block contained stale duplicate field declarations from the
    pre-refactor schema (melodies/devices/app_users declared twice, a bool
    mqtt shadowed by MqttPermissions, and orphaned equipment/manufacturing
    fields) — only the section-specific models remain.
    """
    melodies: MelodiesPermissions = MelodiesPermissions()
    devices: DevicesPermissions = DevicesPermissions()
    app_users: AppUsersPermissions = AppUsersPermissions()
    issues_notes: IssuesNotesPermissions = IssuesNotesPermissions()
    mail: MailPermissions = MailPermissions()
    crm: CrmPermissions = CrmPermissions()
    crm_customers: CrmCustomersPermissions = CrmCustomersPermissions()
    crm_products: CrmProductsPermissions = CrmProductsPermissions()
    mfg: MfgPermissions = MfgPermissions()
    api_reference: ApiReferencePermissions = ApiReferencePermissions()
    mqtt: MqttPermissions = MqttPermissions()
# Default permissions per role
def default_permissions_for_role(role: str) -> Optional[dict]:
    """Return the default permission payload for *role*.

    - sysadmin/admin: None (full access; the permissions field is unused).
    - editor: near-full access across all sections.
    - any other role: treated as view-only user.

    Fix: removed duplicate dict keys and the unused ``full``/``view_only``
    locals left over from the pre-refactor generic-permissions schema, along
    with the stale ``equipment``/``manufacturing`` entries that no longer
    exist in StaffPermissions.
    """
    if role in ("sysadmin", "admin"):
        return None  # Full access, permissions field not used
    if role == "editor":
        return {
            "melodies": {"view": True, "add": True, "delete": True, "safe_edit": True, "full_edit": True, "archetype_access": True, "settings_access": True, "compose_access": True},
            "devices": {"view": True, "add": True, "delete": True, "safe_edit": True, "edit_bells": True, "edit_clock": True, "edit_warranty": True, "full_edit": True, "control": True},
            "app_users": {"view": True, "add": True, "delete": True, "safe_edit": True, "full_edit": True},
            "issues_notes": {"view": True, "add": True, "delete": True, "edit": True},
            "mail": {"view": True, "compose": True, "reply": True},
            "crm": {"activity_log": True},
            "crm_customers": {"full_access": True, "overview": True, "orders_view": True, "orders_edit": True, "quotations_view": True, "quotations_edit": True, "comms_view": True, "comms_log": True, "comms_edit": True, "comms_compose": True, "add": True, "delete": True, "files_view": True, "files_edit": True, "devices_view": True, "devices_edit": True},
            "crm_products": {"view": True, "add": True, "edit": True},
            "mfg": {"view_inventory": True, "edit": True, "provision": True, "firmware_view": True, "firmware_edit": True},
            "api_reference": {"access": True},
            "mqtt": {"access": True},
        }
    # user role - view only
    return {
        "melodies": {"view": True, "add": False, "delete": False, "safe_edit": False, "full_edit": False, "archetype_access": False, "settings_access": False, "compose_access": False},
        "devices": {"view": True, "add": False, "delete": False, "safe_edit": False, "edit_bells": False, "edit_clock": False, "edit_warranty": False, "full_edit": False, "control": False},
        "app_users": {"view": True, "add": False, "delete": False, "safe_edit": False, "full_edit": False},
        "issues_notes": {"view": True, "add": False, "delete": False, "edit": False},
        "mail": {"view": True, "compose": False, "reply": False},
        "crm": {"activity_log": False},
        "crm_customers": {"full_access": False, "overview": True, "orders_view": True, "orders_edit": False, "quotations_view": True, "quotations_edit": False, "comms_view": True, "comms_log": False, "comms_edit": False, "comms_compose": False, "add": False, "delete": False, "files_view": True, "files_edit": False, "devices_view": True, "devices_edit": False},
        "crm_products": {"view": True, "add": False, "edit": False},
        "mfg": {"view_inventory": True, "edit": False, "provision": False, "firmware_view": True, "firmware_edit": False},
        "api_reference": {"access": False},
        "mqtt": {"access": False},
    }

View File

@@ -1,5 +1,5 @@
from pydantic_settings import BaseSettings
from typing import List
from typing import List, Dict, Any
import json
@@ -20,6 +20,7 @@ class Settings(BaseSettings):
mqtt_admin_password: str = ""
mqtt_secret: str = "change-me-in-production"
mosquitto_password_file: str = "/etc/mosquitto/passwd"
mqtt_client_id: str = "bellsystems-admin-panel"
# SQLite (MQTT data storage)
sqlite_db_path: str = "./mqtt_data.db"
@@ -37,6 +38,30 @@ class Settings(BaseSettings):
backend_cors_origins: str = '["http://localhost:5173"]'
debug: bool = True
# Nextcloud WebDAV
nextcloud_url: str = ""
nextcloud_username: str = "" # WebDAV login & URL path username
nextcloud_password: str = "" # Use an app password for better security
nextcloud_dav_user: str = "" # Override URL path username if different from login
nextcloud_base_path: str = "BellSystems"
# IMAP/SMTP Email
imap_host: str = ""
imap_port: int = 993
imap_username: str = ""
imap_password: str = ""
imap_use_ssl: bool = True
smtp_host: str = ""
smtp_port: int = 587
smtp_username: str = ""
smtp_password: str = ""
smtp_use_tls: bool = True
email_sync_interval_minutes: int = 15
# Multi-mailbox config (JSON array). If empty, legacy single-account IMAP/SMTP is used.
# Example item:
# {"key":"sales","label":"Sales","email":"sales@bellsystems.gr","imap_host":"...","imap_username":"...","imap_password":"...","smtp_host":"...","smtp_username":"...","smtp_password":"...","sync_inbound":true,"allow_send":true}
mail_accounts_json: str = "[]"
# Auto-deploy (Gitea webhook)
deploy_secret: str = ""
deploy_project_path: str = "/app"
@@ -45,6 +70,14 @@ class Settings(BaseSettings):
def cors_origins(self) -> List[str]:
return json.loads(self.backend_cors_origins)
@property
def mail_accounts(self) -> List[Dict[str, Any]]:
    """Parse ``mail_accounts_json``; returns [] for bad JSON or a non-list value."""
    try:
        parsed = json.loads(self.mail_accounts_json or "[]")
    except Exception:
        return []
    return parsed if isinstance(parsed, list) else []
model_config = {"env_file": ".env", "extra": "ignore"}

0
backend/crm/__init__.py Normal file
View File

417
backend/crm/comms_router.py Normal file
View File

@@ -0,0 +1,417 @@
import base64
import json
from fastapi import APIRouter, Depends, HTTPException, Query, Form, File, UploadFile
from pydantic import BaseModel
from typing import List, Optional
from auth.models import TokenPayload
from auth.dependencies import require_permission
from config import settings
from crm.models import CommCreate, CommUpdate, CommInDB, CommListResponse, MediaCreate, MediaDirection
from crm import service
from crm import email_sync
from crm.mail_accounts import get_mail_accounts
router = APIRouter(prefix="/api/crm/comms", tags=["crm-comms"])


class EmailSendResponse(BaseModel):
    """Wrapper around the stored comm entry produced by a send."""
    entry: dict


class EmailSyncResponse(BaseModel):
    """Result of a sync run: how many new emails were stored."""
    new_count: int


class MailListResponse(BaseModel):
    """Generic mail listing payload (entries plus total count)."""
    entries: list
    total: int
@router.get("/all", response_model=CommListResponse)
async def list_all_comms(
    type: Optional[str] = Query(None),
    direction: Optional[str] = Query(None),
    limit: int = Query(200, le=500),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List communications across all customers (capped at 500 rows)."""
    rows = await service.list_all_comms(type=type, direction=direction, limit=limit)
    return CommListResponse(entries=rows, total=len(rows))


@router.get("", response_model=CommListResponse)
async def list_comms(
    customer_id: str = Query(...),
    type: Optional[str] = Query(None),
    direction: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List communications for a single customer."""
    rows = await service.list_comms(customer_id=customer_id, type=type, direction=direction)
    return CommListResponse(entries=rows, total=len(rows))
@router.post("", response_model=CommInDB, status_code=201)
async def create_comm(
    body: CommCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a manually-logged communication entry."""
    return await service.create_comm(body)
@router.get("/email/all", response_model=MailListResponse)
async def list_all_emails(
    direction: Optional[str] = Query(None),
    customers_only: bool = Query(False),
    mailbox: Optional[str] = Query(None, description="sales|support|both|all or account key"),
    limit: int = Query(500, le=1000),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Return all email comms (all senders + unmatched), for the Mail page.

    Fix: the explicit "sales"/"support" branches were dead code — each
    produced exactly ``[mailbox]``, same as the generic fallback — so any
    value other than all/both simply selects that account key.
    """
    selected_accounts = None
    if mailbox and mailbox not in {"all", "both"}:
        selected_accounts = [mailbox]
    entries = await service.list_all_emails(
        direction=direction,
        customers_only=customers_only,
        mail_accounts=selected_accounts,
        limit=limit,
    )
    return MailListResponse(entries=entries, total=len(entries))
@router.get("/email/accounts")
async def list_mail_accounts(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Expose the configured mail accounts (no credentials included)."""
    return {
        "accounts": [
            {
                "key": acc["key"],
                "label": acc["label"],
                "email": acc["email"],
                "sync_inbound": bool(acc.get("sync_inbound")),
                "allow_send": bool(acc.get("allow_send")),
            }
            for acc in get_mail_accounts()
        ]
    }


@router.get("/email/check")
async def check_new_emails(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Lightweight check: returns how many emails are on the server vs. stored locally."""
    return await email_sync.check_new_emails()
# Email endpoints — must be before /{comm_id} wildcard routes
@router.post("/email/send", response_model=EmailSendResponse)
async def send_email_endpoint(
    customer_id: Optional[str] = Form(None),
    from_account: Optional[str] = Form(None),
    to: str = Form(...),
    subject: str = Form(...),
    body: str = Form(...),
    body_html: str = Form(""),
    cc: str = Form("[]"),  # JSON-encoded list of strings
    files: List[UploadFile] = File(default=[]),
    user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Send an email via SMTP (with optional attachments) and store the comm entry."""
    if not get_mail_accounts():
        raise HTTPException(status_code=503, detail="SMTP not configured")
    try:
        cc_list: List[str] = json.loads(cc) if cc else []
    except Exception:
        cc_list = []
    # Buffer every uploaded file in memory before handing off to SMTP.
    attachments = []
    for upload in files:
        data = await upload.read()
        attachments.append((upload.filename, data, upload.content_type or "application/octet-stream"))
    from crm.email_sync import send_email
    try:
        entry = await send_email(
            customer_id=customer_id or None,
            from_account=from_account,
            to=to,
            subject=subject,
            body=body,
            body_html=body_html,
            cc=cc_list,
            sent_by=user.name or user.sub,
            file_attachments=attachments or None,
        )
    except RuntimeError as e:
        raise HTTPException(status_code=400, detail=str(e))
    return EmailSendResponse(entry=entry)
@router.post("/email/sync", response_model=EmailSyncResponse)
async def sync_email_endpoint(
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Trigger a manual IMAP sync; returns how many new entries were stored."""
    if not get_mail_accounts():
        raise HTTPException(status_code=503, detail="IMAP not configured")
    from crm.email_sync import sync_emails
    return EmailSyncResponse(new_count=await sync_emails())


class SaveInlineRequest(BaseModel):
    """Request body for saving an inline data-URI image to Nextcloud."""
    data_uri: str
    filename: str
    subfolder: str = "received_media"
    mime_type: Optional[str] = None
async def _resolve_customer_folder(customer_id: str) -> str:
    """Return the Nextcloud folder_id for a customer (falls back to customer_id)."""
    from shared.firebase import get_db as get_firestore
    snapshot = get_firestore().collection("crm_customers").document(customer_id).get()
    if not snapshot.exists:
        raise HTTPException(status_code=404, detail="Customer not found")
    return snapshot.to_dict().get("folder_id") or customer_id
async def _upload_to_nc(folder_id: str, subfolder: str, filename: str,
                        content: bytes, mime_type: str, customer_id: str,
                        uploaded_by: str, tags: list[str]) -> dict:
    """Upload *content* into the customer's Nextcloud folder and record a media row.

    Fix: the target path previously ended in a hard-coded "(unknown)"
    placeholder instead of the actual *filename*, so every upload for a
    customer landed on — and overwrote — the same remote path.
    """
    from crm import nextcloud
    target_folder = f"customers/{folder_id}/{subfolder}"
    file_path = f"{target_folder}/{filename}"
    await nextcloud.ensure_folder(target_folder)
    await nextcloud.upload_file(file_path, content, mime_type)
    media = await service.create_media(MediaCreate(
        customer_id=customer_id,
        filename=filename,
        nextcloud_path=file_path,
        mime_type=mime_type,
        direction=MediaDirection.received,
        tags=tags,
        uploaded_by=uploaded_by,
    ))
    return {"ok": True, "media_id": media.id, "nextcloud_path": file_path}
@router.post("/email/{comm_id}/save-inline")
async def save_email_inline_image(
    comm_id: str,
    body: SaveInlineRequest,
    user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Save an inline image (data-URI from email HTML body) to Nextcloud."""
    comm = await service.get_comm(comm_id)
    customer_id = comm.customer_id
    if not customer_id:
        raise HTTPException(status_code=400, detail="This email is not linked to a customer")
    folder_id = await _resolve_customer_folder(customer_id)
    # Split the data URI into its header and base64 payload.
    mime_type = body.mime_type or "image/png"
    if "," in body.data_uri:
        header, encoded = body.data_uri.split(",", 1)
        try:
            mime_type = header.split(":")[1].split(";")[0]
        except Exception:
            pass
    else:
        encoded = body.data_uri
    try:
        content = base64.b64decode(encoded)
    except Exception:
        raise HTTPException(status_code=400, detail="Invalid base64 data")
    return await _upload_to_nc(
        folder_id, body.subfolder, body.filename,
        content, mime_type, customer_id,
        user.name or user.sub, ["email-inline-image"],
    )
@router.post("/email/{comm_id}/save-attachment/{attachment_index}")
async def save_email_attachment(
    comm_id: str,
    attachment_index: int,
    filename: str = Form(...),
    subfolder: str = Form("received_media"),
    user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Re-fetch a specific attachment from IMAP (by index in the email's attachment list)
    and save it to the customer's Nextcloud media folder.

    Raises 400 for bad input/state, 404 if the message or attachment is no
    longer on the server, 502 for any other IMAP failure.
    """
    import asyncio
    comm = await service.get_comm(comm_id)
    customer_id = comm.customer_id
    if not customer_id:
        raise HTTPException(status_code=400, detail="This email is not linked to a customer")
    ext_message_id = comm.ext_message_id
    if not ext_message_id:
        raise HTTPException(status_code=400, detail="No message ID stored for this email")
    attachments_meta = comm.attachments or []
    if attachment_index < 0 or attachment_index >= len(attachments_meta):
        raise HTTPException(status_code=400, detail="Attachment index out of range")
    att_meta = attachments_meta[attachment_index]
    mime_type = att_meta.content_type or "application/octet-stream"
    from crm.mail_accounts import account_by_key, account_by_email
    account = account_by_key(comm.mail_account) or account_by_email(comm.from_addr)
    if not account:
        raise HTTPException(status_code=400, detail="Email account config not found for this message")

    # Re-fetch from IMAP in an executor — imaplib is blocking.
    def _fetch_attachment():
        import imaplib, email as _email
        if account.get("imap_use_ssl"):
            imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
        else:
            imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))
        # Search by Message-ID header
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        uids = data[0].split() if data[0] else []
        if not uids:
            raise ValueError(f"Message not found on IMAP server: {ext_message_id}")
        _, msg_data = imap.fetch(uids[0], "(RFC822)")
        raw = msg_data[0][1]
        msg = _email.message_from_bytes(raw)
        imap.logout()
        # Walk attachments in order — find the one at attachment_index
        found_idx = 0
        for part in msg.walk():
            cd = str(part.get("Content-Disposition", ""))
            if "attachment" not in cd:
                continue
            if found_idx == attachment_index:
                payload = part.get_payload(decode=True)
                if payload is None:
                    raise ValueError("Attachment payload is empty")
                return payload
            found_idx += 1
        raise ValueError(f"Attachment index {attachment_index} not found in message")

    # Fix: use get_running_loop() — get_event_loop() inside a coroutine is
    # deprecated since Python 3.10.
    loop = asyncio.get_running_loop()
    try:
        content = await loop.run_in_executor(None, _fetch_attachment)
    except ValueError as e:
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=502, detail=f"IMAP fetch failed: {e}")
    folder_id = await _resolve_customer_folder(customer_id)
    return await _upload_to_nc(
        folder_id, subfolder, filename,
        content, mime_type, customer_id,
        user.name or user.sub, ["email-attachment"],
    )
class BulkDeleteRequest(BaseModel):
    """IDs of comm entries to delete."""
    ids: List[str]


class ToggleImportantRequest(BaseModel):
    """Desired important flag for a comm entry."""
    important: bool


class ToggleReadRequest(BaseModel):
    """Desired read flag for a comm entry."""
    read: bool


@router.post("/bulk-delete", status_code=200)
async def bulk_delete_comms(
    body: BulkDeleteRequest,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete many comms; for emails, attempt remote IMAP delete first."""
    # Try remote IMAP delete for email rows first (best-effort), then local delete.
    for comm_id in body.ids:
        try:
            comm = await service.get_comm(comm_id)
            if comm.type == "email" and comm.ext_message_id:
                await email_sync.delete_remote_email(
                    comm.ext_message_id,
                    comm.mail_account,
                    comm.from_addr,
                )
        except Exception:
            # Keep delete resilient; local delete still proceeds.
            pass
    deleted = await service.delete_comms_bulk(body.ids)
    return {"deleted": deleted}
@router.patch("/{comm_id}/important", response_model=CommInDB)
async def set_comm_important(
    comm_id: str,
    body: ToggleImportantRequest,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Flag or unflag a comm entry as important."""
    return await service.set_comm_important(comm_id, body.important)


@router.patch("/{comm_id}/read", response_model=CommInDB)
async def set_comm_read(
    comm_id: str,
    body: ToggleReadRequest,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Set read/unread locally and, best-effort, mirror the flag on IMAP."""
    try:
        comm = await service.get_comm(comm_id)
        if comm.type == "email" and comm.ext_message_id:
            await email_sync.set_remote_read(
                comm.ext_message_id,
                comm.mail_account,
                comm.from_addr,
                body.read,
            )
    except Exception:
        pass  # remote flag sync is best-effort; local update still runs
    return await service.set_comm_read(comm_id, body.read)
@router.put("/{comm_id}", response_model=CommInDB)
async def update_comm(
    comm_id: str,
    body: CommUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Update a comm entry."""
    return await service.update_comm(comm_id, body)


@router.delete("/{comm_id}", status_code=204)
async def delete_comm(
    comm_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a comm entry; for emails, attempt remote IMAP delete first."""
    try:
        comm = await service.get_comm(comm_id)
        if comm.type == "email" and comm.ext_message_id:
            await email_sync.delete_remote_email(
                comm.ext_message_id,
                comm.mail_account,
                comm.from_addr,
            )
    except Exception:
        pass  # remote delete is best-effort; local delete still runs
    await service.delete_comm(comm_id)

View File

@@ -0,0 +1,71 @@
import asyncio
import logging
from fastapi import APIRouter, Depends, Query, BackgroundTasks
from typing import Optional
from auth.models import TokenPayload
from auth.dependencies import require_permission
from crm.models import CustomerCreate, CustomerUpdate, CustomerInDB, CustomerListResponse
from crm import service, nextcloud
from config import settings
router = APIRouter(prefix="/api/crm/customers", tags=["crm-customers"])
logger = logging.getLogger(__name__)


@router.get("", response_model=CustomerListResponse)
def list_customers(
    search: Optional[str] = Query(None),
    tag: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List customers, optionally filtered by search text and/or tag."""
    found = service.list_customers(search=search, tag=tag)
    return CustomerListResponse(customers=found, total=len(found))


@router.get("/{customer_id}", response_model=CustomerInDB)
def get_customer(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch a single customer by id."""
    return service.get_customer(customer_id)
@router.post("", response_model=CustomerInDB, status_code=201)
async def create_customer(
    body: CustomerCreate,
    background_tasks: BackgroundTasks,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a customer; Nextcloud folder scaffolding runs in the background."""
    customer = service.create_customer(body)
    if settings.nextcloud_url:
        background_tasks.add_task(_init_nextcloud_folder, customer)
    return customer


async def _init_nextcloud_folder(customer) -> None:
    """Best-effort creation of the customer's Nextcloud folder tree."""
    try:
        base = f"customers/{service.get_customer_nc_path(customer)}"
        for sub in ("media", "documents", "sent", "received"):
            await nextcloud.ensure_folder(f"{base}/{sub}")
        await nextcloud.write_info_file(base, customer.name, customer.id)
    except Exception as e:
        logger.warning("Nextcloud folder init failed for customer %s: %s", customer.id, e)
@router.put("/{customer_id}", response_model=CustomerInDB)
def update_customer(
    customer_id: str,
    body: CustomerUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Update an existing customer."""
    return service.update_customer(customer_id, body)


@router.delete("/{customer_id}", status_code=204)
def delete_customer(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a customer."""
    service.delete_customer(customer_id)

837
backend/crm/email_sync.py Normal file
View File

@@ -0,0 +1,837 @@
"""
IMAP email sync and SMTP email send for CRM.
Uses only stdlib imaplib/smtplib — no extra dependencies.
Sync is run in an executor to avoid blocking the event loop.
"""
import asyncio
import base64
import email
import email.header
import email.utils
import html.parser
import imaplib
import json
import logging
import re
import smtplib
import uuid
from datetime import datetime, timezone
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email import encoders
from typing import List, Optional, Tuple
from config import settings
from mqtt import database as mqtt_db
from crm.mail_accounts import get_mail_accounts, account_by_key, account_by_email
logger = logging.getLogger("crm.email_sync")
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _decode_header(raw: str) -> str:
"""Decode an RFC2047-encoded email header value."""
if not raw:
return ""
parts = email.header.decode_header(raw)
decoded = []
for part, enc in parts:
if isinstance(part, bytes):
decoded.append(part.decode(enc or "utf-8", errors="replace"))
else:
decoded.append(part)
return " ".join(decoded)
class _HTMLStripper(html.parser.HTMLParser):
def __init__(self):
super().__init__()
self._text = []
def handle_data(self, data):
self._text.append(data)
def get_text(self):
return " ".join(self._text)
def _strip_html(html_str: str) -> str:
s = _HTMLStripper()
s.feed(html_str)
return s.get_text()
def _extract_inline_data_images(html_body: str) -> tuple[str, list[tuple[str, bytes, str]]]:
"""Replace data-URI images in HTML with cid: references and return inline parts.
Returns: (new_html, [(cid, image_bytes, mime_type), ...])
"""
if not html_body:
return "", []
inline_parts: list[tuple[str, bytes, str]] = []
seen: dict[str, str] = {} # data-uri -> cid
src_pattern = re.compile(r"""src=(['"])(data:image/[^'"]+)\1""", re.IGNORECASE)
data_pattern = re.compile(r"^data:(image/[a-zA-Z0-9.+-]+);base64,(.+)$", re.IGNORECASE | re.DOTALL)
def _replace(match: re.Match) -> str:
quote = match.group(1)
data_uri = match.group(2)
if data_uri in seen:
cid = seen[data_uri]
return f"src={quote}cid:{cid}{quote}"
parsed = data_pattern.match(data_uri)
if not parsed:
return match.group(0)
mime_type = parsed.group(1).lower()
b64_data = parsed.group(2).strip()
try:
payload = base64.b64decode(b64_data, validate=False)
except Exception:
return match.group(0)
cid = f"inline-{uuid.uuid4().hex}"
seen[data_uri] = cid
inline_parts.append((cid, payload, mime_type))
return f"src={quote}cid:{cid}{quote}"
return src_pattern.sub(_replace, html_body), inline_parts
def _load_customer_email_map() -> dict[str, str]:
    """Build a lookup of customer email -> customer_id from Firestore."""
    from shared.firebase import get_db as get_firestore
    lookup: dict[str, str] = {}
    for doc in get_firestore().collection("crm_customers").stream():
        for contact in ((doc.to_dict() or {}).get("contacts") or []):
            if contact.get("type") == "email" and contact.get("value"):
                lookup[str(contact["value"]).strip().lower()] = doc.id
    return lookup
def _get_body(msg: email.message.Message) -> tuple[str, str]:
"""Extract (plain_text, html_body) from an email message.
Inline images (cid: references) are substituted with data-URIs so they
render correctly in a sandboxed iframe without external requests.
"""
import base64 as _b64
plain = None
html_body = None
# Map Content-ID → data-URI for inline images
cid_map: dict[str, str] = {}
if msg.is_multipart():
for part in msg.walk():
ct = part.get_content_type()
cd = str(part.get("Content-Disposition", ""))
cid = part.get("Content-ID", "").strip().strip("<>")
if "attachment" in cd:
continue
if ct == "text/plain" and plain is None:
raw = part.get_payload(decode=True)
charset = part.get_content_charset() or "utf-8"
plain = raw.decode(charset, errors="replace")
elif ct == "text/html" and html_body is None:
raw = part.get_payload(decode=True)
charset = part.get_content_charset() or "utf-8"
html_body = raw.decode(charset, errors="replace")
elif ct.startswith("image/") and cid:
raw = part.get_payload(decode=True)
if raw:
b64 = _b64.b64encode(raw).decode("ascii")
cid_map[cid] = f"data:{ct};base64,{b64}"
else:
ct = msg.get_content_type()
payload = msg.get_payload(decode=True)
charset = msg.get_content_charset() or "utf-8"
if payload:
text = payload.decode(charset, errors="replace")
if ct == "text/plain":
plain = text
elif ct == "text/html":
html_body = text
# Substitute cid: references with data-URIs
if html_body and cid_map:
for cid, data_uri in cid_map.items():
html_body = html_body.replace(f"cid:{cid}", data_uri)
plain_text = (plain or (html_body and _strip_html(html_body)) or "").strip()
return plain_text, (html_body or "").strip()
def _get_attachments(msg: email.message.Message) -> list[dict]:
"""Extract attachment info (filename, content_type, size) without storing content."""
attachments = []
if msg.is_multipart():
for part in msg.walk():
cd = str(part.get("Content-Disposition", ""))
if "attachment" in cd:
filename = part.get_filename() or "attachment"
filename = _decode_header(filename)
ct = part.get_content_type() or "application/octet-stream"
payload = part.get_payload(decode=True)
size = len(payload) if payload else 0
attachments.append({"filename": filename, "content_type": ct, "size": size})
return attachments
# ---------------------------------------------------------------------------
# IMAP sync (synchronous — called via run_in_executor)
# ---------------------------------------------------------------------------
def _sync_account_emails_sync(account: dict) -> tuple[list[dict], bool]:
    """Fetch and parse every message in *account*'s inbox.

    Returns (messages, complete); *complete* is False when any message
    failed to parse, or when the account config is missing IMAP fields.
    """
    if not account.get("imap_host") or not account.get("imap_username") or not account.get("imap_password"):
        return [], False
    host, port = account["imap_host"], int(account["imap_port"])
    imap = imaplib.IMAP4_SSL(host, port) if account.get("imap_use_ssl") else imaplib.IMAP4(host, port)
    imap.login(account["imap_username"], account["imap_password"])
    # readonly=True prevents marking messages as \Seen while syncing.
    imap.select(account.get("imap_inbox", "INBOX"), readonly=True)
    _, data = imap.search(None, "ALL")
    uids = data[0].split() if data[0] else []
    parsed: list[dict] = []
    complete = True
    for uid in uids:
        try:
            _, msg_data = imap.fetch(uid, "(FLAGS RFC822)")
            meta = msg_data[0][0] if msg_data and isinstance(msg_data[0], tuple) else b""
            msg = email.message_from_bytes(msg_data[0][1])
            try:
                occurred_at = email.utils.parsedate_to_datetime(msg.get("Date", "")).isoformat()
            except Exception:
                occurred_at = datetime.now(timezone.utc).isoformat()
            try:
                body, body_html = _get_body(msg)
            except Exception:
                body, body_html = "", ""
            try:
                file_attachments = _get_attachments(msg)
            except Exception:
                file_attachments = []
            parsed.append({
                "mail_account": account["key"],
                "message_id": msg.get("Message-ID", "").strip(),
                "from_addr": email.utils.parseaddr(msg.get("From", ""))[1],
                "to_addrs": [a for _, a in email.utils.getaddresses([msg.get("To", "")])],
                "subject": _decode_header(msg.get("Subject", "")),
                "body": body,
                "body_html": body_html,
                "attachments": file_attachments,
                "occurred_at": occurred_at,
                "is_read": bool(b"\\Seen" in (meta or b"")),
            })
        except Exception as e:
            complete = False
            logger.warning(f"[EMAIL SYNC] Failed to parse message uid={uid} account={account['key']}: {e}")
    imap.logout()
    return parsed, complete
def _sync_emails_sync() -> tuple[list[dict], bool]:
    """Fetch messages from every inbound account, deduplicating by mailbox.

    Aliases often point at the same physical inbox, so accounts sharing
    (host, port, username, folder) are only fetched once.
    """
    collected: list[dict] = []
    overall_complete = True
    seen_sources: set[tuple] = set()
    for acc in get_mail_accounts():
        if not acc.get("sync_inbound"):
            continue
        source = (
            (acc.get("imap_host") or "").lower(),
            int(acc.get("imap_port") or 0),
            (acc.get("imap_username") or "").lower(),
            (acc.get("imap_inbox") or "INBOX").upper(),
        )
        if source in seen_sources:
            continue
        seen_sources.add(source)
        msgs, ok = _sync_account_emails_sync(acc)
        collected.extend(msgs)
        overall_complete = overall_complete and ok
    return collected, overall_complete
async def sync_emails() -> int:
    """
    Pull emails from IMAP, match against CRM customers, store new ones.

    Beyond inserting new messages this also:
      - backfills customer linkage on rows stored without a customer_id,
      - mirrors the server's read/unread flag onto stored inbound mail,
      - mirrors remote deletions, requiring two consecutive syncs in which
        a message is missing before deleting locally (guards against
        transient IMAP inconsistency causing add/remove oscillation).

    Returns count of new entries created.
    """
    if not get_mail_accounts():
        return 0
    loop = asyncio.get_event_loop()
    try:
        messages, fetch_complete = await loop.run_in_executor(None, _sync_emails_sync)
    except Exception as e:
        logger.error(f"[EMAIL SYNC] IMAP connect/fetch failed: {e}")
        raise
    db = await mqtt_db.get_db()
    # Load all customer email contacts into a flat lookup: email -> customer_id
    addr_to_customer = _load_customer_email_map()
    # Load already-synced message-ids from DB
    rows = await db.execute_fetchall(
        "SELECT id, ext_message_id, COALESCE(mail_account, '') as mail_account, direction, is_read, customer_id "
        "FROM crm_comms_log WHERE type='email' AND ext_message_id IS NOT NULL"
    )
    known_map = {
        (r[1], r[2] or ""): {
            "id": r[0],
            "direction": r[3],
            "is_read": int(r[4] or 0),
            "customer_id": r[5],
        }
        for r in rows
    }
    new_count = 0
    now = datetime.now(timezone.utc).isoformat()
    server_ids_by_account: dict[str, set[str]] = {}
    # Global inbound IDs from server snapshot, used to avoid account-classification delete oscillation.
    inbound_server_ids: set[str] = set()
    accounts = get_mail_accounts()
    accounts_by_email = {a["email"].lower(): a for a in accounts}
    # Initialize tracked inbound accounts even if inbox is empty.
    for a in accounts:
        if a.get("sync_inbound"):
            server_ids_by_account[a["key"]] = set()
    for msg in messages:
        mid = msg["message_id"]
        fetch_account_key = (msg.get("mail_account") or "").strip().lower()
        from_addr = msg["from_addr"].lower()
        to_addrs = [a.lower() for a in msg["to_addrs"]]
        sender_acc = accounts_by_email.get(from_addr)
        if sender_acc:
            # Sent by one of our accounts -> outbound copy; customers are the recipients.
            direction = "outbound"
            resolved_account_key = sender_acc["key"]
            customer_addrs = to_addrs
        else:
            direction = "inbound"
            target_acc = None
            for addr in to_addrs:
                if addr in accounts_by_email:
                    target_acc = accounts_by_email[addr]
                    break
            resolved_account_key = (target_acc["key"] if target_acc else fetch_account_key)
            customer_addrs = [from_addr]
            if target_acc and not target_acc.get("sync_inbound"):
                # Ignore inbound for non-synced aliases (e.g. info/news).
                continue
        if direction == "inbound" and mid and resolved_account_key in server_ids_by_account:
            server_ids_by_account[resolved_account_key].add(mid)
            inbound_server_ids.add(mid)
        # Find matching customer (may be None - we still store the email)
        customer_id = None
        for addr in customer_addrs:
            if addr in addr_to_customer:
                customer_id = addr_to_customer[addr]
                break
        if mid and (mid, resolved_account_key) in known_map:
            existing = known_map[(mid, resolved_account_key)]
            # Backfill customer linkage for rows created without customer_id.
            if customer_id and not existing.get("customer_id"):
                await db.execute(
                    "UPDATE crm_comms_log SET customer_id=? WHERE id=?",
                    (customer_id, existing["id"]),
                )
            # Existing inbound message: sync read/unread state from server.
            if direction == "inbound":
                server_read = 1 if msg.get("is_read") else 0
                await db.execute(
                    "UPDATE crm_comms_log SET is_read=? "
                    "WHERE type='email' AND direction='inbound' AND ext_message_id=? AND mail_account=?",
                    (server_read, mid, resolved_account_key),
                )
            continue  # already stored
        attachments_json = json.dumps(msg.get("attachments") or [])
        to_addrs_json = json.dumps(to_addrs)
        entry_id = str(uuid.uuid4())
        await db.execute(
            """INSERT INTO crm_comms_log
               (id, customer_id, type, mail_account, direction, subject, body, body_html, attachments,
                ext_message_id, from_addr, to_addrs, logged_by, occurred_at, created_at, is_read)
               VALUES (?, ?, 'email', ?, ?, ?, ?, ?, ?, ?, ?, ?, 'system', ?, ?, ?)""",
            (entry_id, customer_id, resolved_account_key, direction, msg["subject"], msg["body"],
             msg.get("body_html", ""), attachments_json,
             mid, from_addr, to_addrs_json, msg["occurred_at"], now, 1 if msg.get("is_read") else 0),
        )
        new_count += 1
    # Mirror remote deletes based on global inbound message-id snapshot.
    # To avoid transient IMAP inconsistency causing add/remove oscillation,
    # require two consecutive "missing" syncs before local deletion.
    sync_keys = [a["key"] for a in accounts if a.get("sync_inbound")]
    if sync_keys and fetch_complete:
        placeholders = ",".join("?" for _ in sync_keys)
        local_rows = await db.execute_fetchall(
            f"SELECT id, ext_message_id, mail_account FROM crm_comms_log "
            f"WHERE type='email' AND direction='inbound' AND mail_account IN ({placeholders}) "
            "AND ext_message_id IS NOT NULL",
            sync_keys,
        )
        to_delete: list[str] = []
        for row in local_rows:
            row_id, ext_id, acc_key = row[0], row[1], row[2]
            if not ext_id:
                continue
            state_key = f"missing_email::{acc_key}::{ext_id}"
            if ext_id in inbound_server_ids:
                await db.execute("DELETE FROM crm_sync_state WHERE key = ?", (state_key,))
                continue
            prev = await db.execute_fetchall("SELECT value FROM crm_sync_state WHERE key = ?", (state_key,))
            prev_count = int(prev[0][0]) if prev and (prev[0][0] or "").isdigit() else 0
            # BUG FIX: this counter previously reused `new_count`, clobbering
            # the number of newly stored emails logged and returned below.
            miss_count = prev_count + 1
            await db.execute(
                "INSERT INTO crm_sync_state (key, value) VALUES (?, ?) "
                "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
                (state_key, str(miss_count)),
            )
            if miss_count >= 2:
                to_delete.append(row_id)
                await db.execute("DELETE FROM crm_sync_state WHERE key = ?", (state_key,))
        if to_delete:
            del_ph = ",".join("?" for _ in to_delete)
            await db.execute(f"DELETE FROM crm_comms_log WHERE id IN ({del_ph})", to_delete)
    if new_count or server_ids_by_account:
        await db.commit()
    # Update last sync time
    await db.execute(
        "INSERT INTO crm_sync_state (key, value) VALUES ('last_email_sync', ?) "
        "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
        (now,),
    )
    await db.commit()
    logger.info(f"[EMAIL SYNC] Done — {new_count} new emails stored")
    return new_count
# ---------------------------------------------------------------------------
# Lightweight new-mail check (synchronous — called via run_in_executor)
# ---------------------------------------------------------------------------
def _check_server_count_sync() -> int:
    """Count messages across all sync-enabled inboxes.

    Kept for backward compatibility; no longer used by check_new_emails().
    Physical mailboxes shared by several aliases are counted only once.
    """
    message_total = 0
    visited: set[tuple] = set()
    for account in get_mail_accounts():
        if not account.get("sync_inbound"):
            continue
        fingerprint = (
            (account.get("imap_host") or "").lower(),
            int(account.get("imap_port") or 0),
            (account.get("imap_username") or "").lower(),
            (account.get("imap_inbox") or "INBOX").upper(),
        )
        if fingerprint in visited:
            continue
        visited.add(fingerprint)
        conn_cls = imaplib.IMAP4_SSL if account.get("imap_use_ssl") else imaplib.IMAP4
        imap = conn_cls(account["imap_host"], int(account["imap_port"]))
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"), readonly=True)
        _, data = imap.search(None, "ALL")
        if data[0]:
            message_total += len(data[0].split())
        imap.logout()
    return message_total
async def check_new_emails() -> dict:
    """
    Compare server message-ids against locally stored ones.
    Returns {"new_count": int} — does NOT download or store anything.
    """
    if not get_mail_accounts():
        return {"new_count": 0}
    loop = asyncio.get_event_loop()
    try:
        # Reuse same account-resolution logic as sync to avoid false positives.
        messages, _ = await loop.run_in_executor(None, _sync_emails_sync)
    except Exception as e:
        logger.warning(f"[EMAIL CHECK] IMAP check failed: {e}")
        raise
    accounts = get_mail_accounts()
    by_email = {a["email"].lower(): a for a in accounts}
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT ext_message_id, COALESCE(mail_account, '') as mail_account FROM crm_comms_log "
        "WHERE type='email' AND ext_message_id IS NOT NULL"
    )
    stored = {(r[0], r[1] or "") for r in rows}
    pending = 0
    for msg in messages:
        mid = (msg.get("message_id") or "").strip()
        if not mid:
            continue
        sender = (msg.get("from_addr") or "").lower()
        if sender in by_email:
            # Outbound copy in mailbox; not part of "new inbound mail" banner.
            continue
        recipients = [(a or "").lower() for a in (msg.get("to_addrs") or [])]
        target = next((by_email[a] for a in recipients if a in by_email), None)
        if target and not target.get("sync_inbound"):
            continue
        if target:
            account_key = target["key"]
        else:
            account_key = (msg.get("mail_account") or "").strip().lower()
        if (mid, account_key) not in stored:
            pending += 1
    return {"new_count": pending}
# ---------------------------------------------------------------------------
# SMTP send (synchronous — called via run_in_executor)
# ---------------------------------------------------------------------------
def _append_to_sent_sync(account: dict, raw_message: bytes) -> None:
    """Best-effort append of sent MIME message to IMAP Sent folder.

    Tries the account's configured Sent mailbox first, then common fallback
    names. Never raises — failures are logged as warnings only.
    """
    if not raw_message:
        return
    try:
        if account.get("imap_use_ssl"):
            imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
        else:
            imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
        imap.login(account["imap_username"], account["imap_password"])
        preferred = str(account.get("imap_sent") or "Sent").strip() or "Sent"
        # Case-insensitive de-dup of candidate mailboxes, keeping priority order.
        mailboxes: list[str] = []
        lowered: set[str] = set()
        for candidate in (preferred, "Sent", "INBOX.Sent", "Sent Items", "INBOX.Sent Items"):
            if candidate.lower() not in lowered:
                lowered.add(candidate.lower())
                mailboxes.append(candidate)
        stored_ok = False
        for mailbox in mailboxes:
            try:
                status, _ = imap.append(mailbox, "\\Seen", None, raw_message)
            except Exception:
                continue
            if status == "OK":
                stored_ok = True
                break
        if not stored_ok:
            logger.warning("[EMAIL SEND] Sent copy append failed for account=%s", account.get("key"))
        imap.logout()
    except Exception as e:
        logger.warning("[EMAIL SEND] IMAP append to Sent failed for account=%s: %s", account.get("key"), e)
def _send_email_sync(
    account: dict,
    to: str,
    subject: str,
    body: str,
    body_html: str,
    cc: List[str],
    file_attachments: Optional[List[Tuple[str, bytes, str]]] = None,
) -> str:
    """Send via SMTP. Returns the Message-ID header.

    file_attachments: list of (filename, content_bytes, mime_type)

    MIME tree:
      - with inline images:  related(alternative(text/plain, text/html), image parts)
      - without inline images: alternative(text/plain, text/html)
      - wrapped in "mixed" only when classic file attachments exist.
    """
    html_with_cids, inline_images = _extract_inline_data_images(body_html or "")
    if inline_images:
        body_part = MIMEMultipart("related")
        alt_part = MIMEMultipart("alternative")
        alt_part.attach(MIMEText(body, "plain", "utf-8"))
        if html_with_cids:
            alt_part.attach(MIMEText(html_with_cids, "html", "utf-8"))
        body_part.attach(alt_part)
        for idx, (cid, content, mime_type) in enumerate(inline_images, start=1):
            maintype, _, subtype = mime_type.partition("/")
            img_part = MIMEBase(maintype or "image", subtype or "png")
            img_part.set_payload(content)
            encoders.encode_base64(img_part)
            img_part.add_header("Content-ID", f"<{cid}>")
            img_part.add_header("Content-Disposition", "inline", filename=f"inline-{idx}.{subtype or 'png'}")
            body_part.attach(img_part)
    else:
        body_part = MIMEMultipart("alternative")
        body_part.attach(MIMEText(body, "plain", "utf-8"))
        if body_html:
            body_part.attach(MIMEText(body_html, "html", "utf-8"))
    # Wrap with mixed only when classic file attachments exist.
    if file_attachments:
        msg = MIMEMultipart("mixed")
        msg.attach(body_part)
    else:
        msg = body_part
    from_addr = account["email"]
    msg["From"] = from_addr
    msg["To"] = to
    msg["Subject"] = subject
    if cc:
        msg["Cc"] = ", ".join(cc)
    msg_id = f"<{uuid.uuid4()}@bellsystems>"
    msg["Message-ID"] = msg_id
    # Attach files
    for filename, content, mime_type in (file_attachments or []):
        maintype, _, subtype = mime_type.partition("/")
        part = MIMEBase(maintype or "application", subtype or "octet-stream")
        part.set_payload(content)
        encoders.encode_base64(part)
        part.add_header("Content-Disposition", "attachment", filename=filename)
        msg.attach(part)
    recipients = [to] + cc
    raw_for_append = msg.as_bytes()
    if account.get("smtp_use_tls"):
        server = smtplib.SMTP(account["smtp_host"], int(account["smtp_port"]))
        server.starttls()
    else:
        server = smtplib.SMTP_SSL(account["smtp_host"], int(account["smtp_port"]))
    try:
        server.login(account["smtp_username"], account["smtp_password"])
        server.sendmail(from_addr, recipients, msg.as_string())
    finally:
        # BUG FIX: always release the SMTP connection, even when login or
        # sendmail raises (previously the socket leaked on failure).
        try:
            server.quit()
        except Exception:
            pass
    _append_to_sent_sync(account, raw_for_append)
    return msg_id
async def send_email(
    customer_id: str | None,
    from_account: str | None,
    to: str,
    subject: str,
    body: str,
    body_html: str,
    cc: List[str],
    sent_by: str,
    file_attachments: Optional[List[Tuple[str, bytes, str]]] = None,
) -> dict:
    """Send an email and record it in crm_comms_log. Returns the new log entry.

    file_attachments: list of (filename, content_bytes, mime_type)

    Raises RuntimeError when no account is configured, the selected account
    is invalid / not allowed to send, or SMTP settings are incomplete.
    """
    accounts = get_mail_accounts()
    if not accounts:
        raise RuntimeError("SMTP not configured")
    account = account_by_key(from_account) if from_account else None
    if not account:
        raise RuntimeError("Please select a valid sender account")
    if not account.get("allow_send"):
        raise RuntimeError("Selected account is not allowed to send")
    if not account.get("smtp_host") or not account.get("smtp_username") or not account.get("smtp_password"):
        raise RuntimeError("SMTP not configured for selected account")
    # If the caller did not provide a customer_id (e.g. compose from Mail page),
    # auto-link by matching recipient addresses against CRM customer emails.
    resolved_customer_id = customer_id
    if not resolved_customer_id:
        addr_to_customer = _load_customer_email_map()
        rcpts = [to, *cc]
        parsed_rcpts = [addr for _, addr in email.utils.getaddresses(rcpts) if addr]
        for addr in parsed_rcpts:
            key = (addr or "").strip().lower()
            if key in addr_to_customer:
                resolved_customer_id = addr_to_customer[key]
                break
    loop = asyncio.get_event_loop()
    import functools
    msg_id = await loop.run_in_executor(
        None,
        functools.partial(_send_email_sync, account, to, subject, body, body_html, cc, file_attachments or []),
    )
    # Upload attachments to Nextcloud and register in crm_media
    comm_attachments = []
    if file_attachments and resolved_customer_id:
        from crm import nextcloud, service
        from crm.models import MediaCreate, MediaDirection
        from shared.firebase import get_db as get_firestore
        firestore_db = get_firestore()
        doc = firestore_db.collection("crm_customers").document(resolved_customer_id).get()
        if doc.exists:
            data = doc.to_dict()
            # Build a minimal CustomerInDB-like object for get_customer_nc_path
            folder_id = data.get("folder_id") or resolved_customer_id
            nc_path = folder_id
            for filename, content, mime_type in file_attachments:
                # images/video → sent_media, everything else → documents
                if mime_type.startswith("image/") or mime_type.startswith("video/"):
                    subfolder = "sent_media"
                else:
                    subfolder = "documents"
                target_folder = f"customers/{nc_path}/{subfolder}"
                # BUG FIX: the remote path previously used a constant placeholder
                # instead of the attachment's filename, so every attachment
                # collided on one path and crm_media pointed at the wrong file.
                file_path = f"{target_folder}/{filename}"
                try:
                    await nextcloud.ensure_folder(target_folder)
                    await nextcloud.upload_file(file_path, content, mime_type)
                    await service.create_media(MediaCreate(
                        customer_id=resolved_customer_id,
                        filename=filename,
                        nextcloud_path=file_path,
                        mime_type=mime_type,
                        direction=MediaDirection.sent,
                        tags=["email-attachment"],
                        uploaded_by=sent_by,
                    ))
                    comm_attachments.append({"filename": filename, "nextcloud_path": file_path})
                except Exception as e:
                    logger.warning(f"[EMAIL SEND] Failed to upload attachment {filename}: {e}")
    now = datetime.now(timezone.utc).isoformat()
    entry_id = str(uuid.uuid4())
    db = await mqtt_db.get_db()
    our_addr = account["email"].lower()
    to_addrs_json = json.dumps([to] + cc)
    attachments_json = json.dumps(comm_attachments)
    await db.execute(
        """INSERT INTO crm_comms_log
           (id, customer_id, type, mail_account, direction, subject, body, body_html, attachments,
            ext_message_id, from_addr, to_addrs, logged_by, occurred_at, created_at)
           VALUES (?, ?, 'email', ?, 'outbound', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (entry_id, resolved_customer_id, account["key"], subject, body, body_html, attachments_json, msg_id,
         our_addr, to_addrs_json, sent_by, now, now),
    )
    await db.commit()
    return {
        "id": entry_id,
        "customer_id": resolved_customer_id,
        "type": "email",
        "mail_account": account["key"],
        "direction": "outbound",
        "subject": subject,
        "body": body,
        "body_html": body_html,
        "attachments": comm_attachments,
        "ext_message_id": msg_id,
        "from_addr": our_addr,
        "to_addrs": [to] + cc,
        "logged_by": sent_by,
        "occurred_at": now,
        "created_at": now,
    }
def _delete_remote_email_sync(account: dict, ext_message_id: str) -> bool:
    """Delete the message with *ext_message_id* from the account's inbox.

    Returns True when at least one matching message was flagged \\Deleted
    and expunged, False when the id is empty or nothing matched.
    """
    if not ext_message_id:
        return False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
    try:
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        uids = data[0].split() if data and data[0] else []
        if not uids:
            return False
        for uid in uids:
            imap.store(uid, "+FLAGS", "\\Deleted")
        imap.expunge()
        return True
    finally:
        # BUG FIX: always release the connection — previously a failing
        # login/select/store left the IMAP socket open.
        try:
            imap.logout()
        except Exception:
            pass
async def delete_remote_email(ext_message_id: str, mail_account: str | None, from_addr: str | None = None) -> bool:
    """Best-effort removal of a message from the remote IMAP inbox.

    Resolves the account by key first, then by sender address. Returns False
    (never raises) when no account matches or the remote delete fails.
    """
    account = account_by_key(mail_account) if mail_account else None
    account = account or account_by_email(from_addr)
    if not account or not account.get("imap_host"):
        return False
    loop = asyncio.get_event_loop()
    try:
        return await loop.run_in_executor(None, lambda: _delete_remote_email_sync(account, ext_message_id))
    except Exception as e:
        logger.warning(f"[EMAIL DELETE] Failed remote delete for {ext_message_id}: {e}")
        return False
def _set_remote_read_sync(account: dict, ext_message_id: str, read: bool) -> bool:
    """Set or clear the \\Seen flag on the remote copy of a message.

    Returns True when at least one matching message was updated, False when
    the id is empty or nothing matched.
    """
    if not ext_message_id:
        return False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
    try:
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        uids = data[0].split() if data and data[0] else []
        if not uids:
            return False
        flag_op = "+FLAGS" if read else "-FLAGS"
        for uid in uids:
            imap.store(uid, flag_op, "\\Seen")
        return True
    finally:
        # Consistent with _delete_remote_email_sync: never leak the connection.
        try:
            imap.logout()
        except Exception:
            pass
async def set_remote_read(ext_message_id: str, mail_account: str | None, from_addr: str | None, read: bool) -> bool:
    """Best-effort mirror of a local read/unread toggle onto the IMAP server.

    Resolves the account by key first, then by sender address. Returns False
    (never raises) when no account matches or the remote update fails.
    """
    account = account_by_key(mail_account) if mail_account else None
    account = account or account_by_email(from_addr)
    if not account or not account.get("imap_host"):
        return False
    loop = asyncio.get_event_loop()
    try:
        return await loop.run_in_executor(None, lambda: _set_remote_read_sync(account, ext_message_id, read))
    except Exception as e:
        logger.warning(f"[EMAIL READ] Failed remote read update for {ext_message_id}: {e}")
        return False

View File

@@ -0,0 +1,104 @@
from __future__ import annotations
from typing import Any
from config import settings
def _bool(v: Any, default: bool) -> bool:
if isinstance(v, bool):
return v
if isinstance(v, str):
return v.strip().lower() in {"1", "true", "yes", "on"}
if v is None:
return default
return bool(v)
def get_mail_accounts() -> list[dict]:
    """
    Returns normalized account dictionaries.
    Falls back to legacy single-account config if MAIL_ACCOUNTS_JSON is empty.
    """
    accounts: list[dict] = []
    for raw in settings.mail_accounts:
        if not isinstance(raw, dict):
            continue
        acc_key = str(raw.get("key") or "").strip().lower()
        acc_email = str(raw.get("email") or "").strip().lower()
        if not acc_key or not acc_email:
            continue  # key and email are mandatory
        accounts.append(
            {
                "key": acc_key,
                "label": str(raw.get("label") or acc_key.title()),
                "email": acc_email,
                # Per-account values fall back to the global IMAP/SMTP settings.
                "imap_host": raw.get("imap_host") or settings.imap_host,
                "imap_port": int(raw.get("imap_port") or settings.imap_port or 993),
                "imap_username": raw.get("imap_username") or acc_email,
                "imap_password": raw.get("imap_password") or settings.imap_password,
                "imap_use_ssl": _bool(raw.get("imap_use_ssl"), settings.imap_use_ssl),
                "imap_inbox": str(raw.get("imap_inbox") or "INBOX"),
                "imap_sent": str(raw.get("imap_sent") or "Sent"),
                "smtp_host": raw.get("smtp_host") or settings.smtp_host,
                "smtp_port": int(raw.get("smtp_port") or settings.smtp_port or 587),
                "smtp_username": raw.get("smtp_username") or acc_email,
                "smtp_password": raw.get("smtp_password") or settings.smtp_password,
                "smtp_use_tls": _bool(raw.get("smtp_use_tls"), settings.smtp_use_tls),
                "sync_inbound": _bool(raw.get("sync_inbound"), True),
                "allow_send": _bool(raw.get("allow_send"), True),
            }
        )
    if accounts:
        return accounts
    # Legacy single-account fallback
    if not (settings.imap_host or settings.smtp_host):
        return []
    legacy_email = (settings.smtp_username or settings.imap_username or "").strip().lower()
    if not legacy_email:
        return []
    return [
        {
            "key": "default",
            "label": "Default",
            "email": legacy_email,
            "imap_host": settings.imap_host,
            "imap_port": settings.imap_port,
            "imap_username": settings.imap_username,
            "imap_password": settings.imap_password,
            "imap_use_ssl": settings.imap_use_ssl,
            "imap_inbox": "INBOX",
            "imap_sent": "Sent",
            "smtp_host": settings.smtp_host,
            "smtp_port": settings.smtp_port,
            "smtp_username": settings.smtp_username,
            "smtp_password": settings.smtp_password,
            "smtp_use_tls": settings.smtp_use_tls,
            "sync_inbound": True,
            "allow_send": True,
        }
    ]
def account_by_key(key: str | None) -> dict | None:
    """Look up a configured mail account by its normalized key, or None."""
    wanted = (key or "").strip().lower()
    if not wanted:
        return None
    return next((acc for acc in get_mail_accounts() if acc["key"] == wanted), None)
def account_by_email(email_addr: str | None) -> dict | None:
    """Look up a configured mail account by its email address, or None."""
    wanted = (email_addr or "").strip().lower()
    if not wanted:
        return None
    return next((acc for acc in get_mail_accounts() if acc["email"] == wanted), None)

View File

@@ -0,0 +1,35 @@
from fastapi import APIRouter, Depends, Query
from typing import Optional
from auth.models import TokenPayload
from auth.dependencies import require_permission
from crm.models import MediaCreate, MediaInDB, MediaListResponse
from crm import service
# All CRM media endpoints are mounted under /api/crm/media.
router = APIRouter(prefix="/api/crm/media", tags=["crm-media"])
@router.get("", response_model=MediaListResponse)
async def list_media(
    customer_id: Optional[str] = Query(None),
    order_id: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List media entries, optionally filtered by customer and/or order.

    Requires the crm:view permission.
    """
    items = await service.list_media(customer_id=customer_id, order_id=order_id)
    return MediaListResponse(items=items, total=len(items))
@router.post("", response_model=MediaInDB, status_code=201)
async def create_media(
    body: MediaCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Register a media entry (file already uploaded elsewhere).

    Requires the crm:edit permission. Returns the created record with 201.
    """
    return await service.create_media(body)
@router.delete("/{media_id}", status_code=204)
async def delete_media(
    media_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a media entry by id. Requires the crm:edit permission."""
    await service.delete_media(media_id)

353
backend/crm/models.py Normal file
View File

@@ -0,0 +1,353 @@
from enum import Enum
from typing import List, Optional
from pydantic import BaseModel
class ProductCategory(str, Enum):
    """Closed set of product categories."""
    controller = "controller"
    striker = "striker"
    clock = "clock"
    part = "part"
    repair_service = "repair_service"
class CostLineItem(BaseModel):
    """One component of a product's cost breakdown."""
    name: str
    quantity: float = 1
    price: float = 0.0
class ProductCosts(BaseModel):
    """Cost breakdown: labor plus itemized parts; `total` may be precomputed."""
    labor_hours: Optional[float] = None
    labor_rate: Optional[float] = None
    items: List[CostLineItem] = []
    total: Optional[float] = None
class ProductStock(BaseModel):
    """Stock counters; `available` is stored, not derived here."""
    on_hand: int = 0
    reserved: int = 0
    available: int = 0
class ProductCreate(BaseModel):
    """Payload for creating a product."""
    name: str
    sku: Optional[str] = None
    category: ProductCategory
    description: Optional[str] = None
    price: float
    currency: str = "EUR"
    costs: Optional[ProductCosts] = None
    stock: Optional[ProductStock] = None
    active: bool = True
    status: str = "active" # active | discontinued | planned
    photo_url: Optional[str] = None
class ProductUpdate(BaseModel):
    """Partial update for a product; all fields optional."""
    name: Optional[str] = None
    sku: Optional[str] = None
    category: Optional[ProductCategory] = None
    description: Optional[str] = None
    price: Optional[float] = None
    currency: Optional[str] = None
    costs: Optional[ProductCosts] = None
    stock: Optional[ProductStock] = None
    active: Optional[bool] = None
    status: Optional[str] = None
    photo_url: Optional[str] = None
class ProductInDB(ProductCreate):
    """Stored product: creation payload plus server-assigned metadata."""
    id: str
    created_at: str
    updated_at: str
class ProductListResponse(BaseModel):
    """Paginated-style list envelope for products."""
    products: List[ProductInDB]
    total: int
# ── Customers ────────────────────────────────────────────────────────────────
class ContactType(str, Enum):
    """Kind of contact channel stored on a customer."""
    email = "email"
    phone = "phone"
    whatsapp = "whatsapp"
    other = "other"
class CustomerContact(BaseModel):
    """A single contact channel; `primary` marks the preferred one."""
    type: ContactType
    label: str
    value: str
    primary: bool = False
class CustomerNote(BaseModel):
    """Free-text note with author (`by`) and timestamp (`at`)."""
    text: str
    by: str
    at: str
class OwnedItemType(str, Enum):
    """Discriminator for OwnedItem: which field group applies."""
    console_device = "console_device"
    product = "product"
    freetext = "freetext"
class OwnedItem(BaseModel):
    """Item owned by a customer; fields used depend on `type`."""
    type: OwnedItemType
    # console_device fields
    device_id: Optional[str] = None
    label: Optional[str] = None
    # product fields
    product_id: Optional[str] = None
    product_name: Optional[str] = None
    quantity: Optional[int] = None
    serial_numbers: Optional[List[str]] = None
    # freetext fields
    description: Optional[str] = None
    serial_number: Optional[str] = None
    notes: Optional[str] = None
class CustomerLocation(BaseModel):
    """Coarse location info; all fields optional."""
    city: Optional[str] = None
    country: Optional[str] = None
    region: Optional[str] = None
class CustomerCreate(BaseModel):
    """Payload for creating a CRM customer."""
    title: Optional[str] = None
    name: str
    surname: Optional[str] = None
    organization: Optional[str] = None
    contacts: List[CustomerContact] = []
    notes: List[CustomerNote] = []
    location: Optional[CustomerLocation] = None
    language: str = "el"
    tags: List[str] = []
    owned_items: List[OwnedItem] = []
    linked_user_ids: List[str] = []
    nextcloud_folder: Optional[str] = None
    folder_id: Optional[str] = None # Human-readable Nextcloud folder name, e.g. "saint-john-corfu"
class CustomerUpdate(BaseModel):
    """Partial update for a customer; all fields optional."""
    title: Optional[str] = None
    name: Optional[str] = None
    surname: Optional[str] = None
    organization: Optional[str] = None
    contacts: Optional[List[CustomerContact]] = None
    notes: Optional[List[CustomerNote]] = None
    location: Optional[CustomerLocation] = None
    language: Optional[str] = None
    tags: Optional[List[str]] = None
    owned_items: Optional[List[OwnedItem]] = None
    linked_user_ids: Optional[List[str]] = None
    nextcloud_folder: Optional[str] = None
    # folder_id intentionally excluded from update — set once at creation
class CustomerInDB(CustomerCreate):
    """Stored customer: creation payload plus server-assigned metadata."""
    id: str
    created_at: str
    updated_at: str
class CustomerListResponse(BaseModel):
    """List envelope for customers."""
    customers: List[CustomerInDB]
    total: int
# ── Orders ───────────────────────────────────────────────────────────────────
class OrderStatus(str, Enum):
    """Order lifecycle states."""
    draft = "draft"
    confirmed = "confirmed"
    in_production = "in_production"
    shipped = "shipped"
    delivered = "delivered"
    cancelled = "cancelled"
class PaymentStatus(str, Enum):
    """Payment progress for an order."""
    pending = "pending"
    partial = "partial"
    paid = "paid"
class OrderDiscount(BaseModel):
    """Discount applied to an order; interpretation depends on `type`."""
    type: str # "percentage" | "fixed"
    value: float = 0
    reason: Optional[str] = None
class OrderShipping(BaseModel):
    """Shipping details; timestamps are ISO strings set as events occur."""
    method: Optional[str] = None
    tracking_number: Optional[str] = None
    carrier: Optional[str] = None
    shipped_at: Optional[str] = None
    delivered_at: Optional[str] = None
    destination: Optional[str] = None
class OrderItem(BaseModel):
    """One line item; field usage depends on `type`."""
    type: str # console_device | product | freetext
    product_id: Optional[str] = None
    product_name: Optional[str] = None
    description: Optional[str] = None
    quantity: int = 1
    unit_price: float = 0.0
    serial_numbers: List[str] = []
class OrderCreate(BaseModel):
    """Payload for creating an order."""
    customer_id: str
    order_number: Optional[str] = None
    status: OrderStatus = OrderStatus.draft
    items: List[OrderItem] = []
    subtotal: float = 0
    discount: Optional[OrderDiscount] = None
    total_price: float = 0
    currency: str = "EUR"
    shipping: Optional[OrderShipping] = None
    payment_status: PaymentStatus = PaymentStatus.pending
    invoice_path: Optional[str] = None
    notes: Optional[str] = None
class OrderUpdate(BaseModel):
    """Partial update for an order; all fields optional."""
    customer_id: Optional[str] = None
    order_number: Optional[str] = None
    status: Optional[OrderStatus] = None
    items: Optional[List[OrderItem]] = None
    subtotal: Optional[float] = None
    discount: Optional[OrderDiscount] = None
    total_price: Optional[float] = None
    currency: Optional[str] = None
    shipping: Optional[OrderShipping] = None
    payment_status: Optional[PaymentStatus] = None
    invoice_path: Optional[str] = None
    notes: Optional[str] = None
class OrderInDB(OrderCreate):
    """Stored order: creation payload plus server-assigned metadata."""
    id: str
    created_at: str
    updated_at: str
class OrderListResponse(BaseModel):
    """List envelope for orders."""
    orders: List[OrderInDB]
    total: int
# ── Comms Log ─────────────────────────────────────────────────────────────────
class CommType(str, Enum):
    """Communication channel of a log entry."""
    email = "email"
    whatsapp = "whatsapp"
    call = "call"
    sms = "sms"
    note = "note"
    in_person = "in_person"
class CommDirection(str, Enum):
    """Who initiated the communication."""
    inbound = "inbound"
    outbound = "outbound"
    internal = "internal"
class CommAttachment(BaseModel):
    """Attachment metadata; the file itself lives at `nextcloud_path`."""
    filename: str
    nextcloud_path: Optional[str] = None
    content_type: Optional[str] = None
    size: Optional[int] = None
class CommCreate(BaseModel):
    """Payload for logging a communication entry."""
    customer_id: Optional[str] = None
    type: CommType
    mail_account: Optional[str] = None
    direction: CommDirection
    subject: Optional[str] = None
    body: Optional[str] = None
    body_html: Optional[str] = None
    attachments: List[CommAttachment] = []
    ext_message_id: Optional[str] = None
    from_addr: Optional[str] = None
    to_addrs: Optional[List[str]] = None
    logged_by: Optional[str] = None
    occurred_at: Optional[str] = None # defaults to now if not provided
class CommUpdate(BaseModel):
    """Partial update for a comms entry; all fields optional."""
    subject: Optional[str] = None
    body: Optional[str] = None
    occurred_at: Optional[str] = None
class CommInDB(BaseModel):
    """Stored comms entry including server-maintained flags."""
    id: str
    customer_id: Optional[str] = None
    type: CommType
    mail_account: Optional[str] = None
    direction: CommDirection
    subject: Optional[str] = None
    body: Optional[str] = None
    body_html: Optional[str] = None
    attachments: List[CommAttachment] = []
    ext_message_id: Optional[str] = None
    from_addr: Optional[str] = None
    to_addrs: Optional[List[str]] = None
    logged_by: Optional[str] = None
    occurred_at: str
    created_at: str
    is_important: bool = False
    is_read: bool = False
class CommListResponse(BaseModel):
    """List envelope for comms entries."""
    entries: List[CommInDB]
    total: int
# ── Media ─────────────────────────────────────────────────────────────────────
class MediaDirection(str, Enum):
    """How the media item entered the system."""
    received = "received"
    sent = "sent"
    internal = "internal"
class MediaCreate(BaseModel):
    """Payload to register a media file already stored in Nextcloud."""
    customer_id: Optional[str] = None
    order_id: Optional[str] = None
    filename: str
    nextcloud_path: str
    mime_type: Optional[str] = None
    direction: Optional[MediaDirection] = None
    tags: List[str] = []
    uploaded_by: Optional[str] = None
class MediaInDB(BaseModel):
    """Stored media record with server-assigned id and timestamp."""
    id: str
    customer_id: Optional[str] = None
    order_id: Optional[str] = None
    filename: str
    nextcloud_path: str
    mime_type: Optional[str] = None
    direction: Optional[MediaDirection] = None
    tags: List[str] = []
    uploaded_by: Optional[str] = None
    created_at: str
class MediaListResponse(BaseModel):
    """List envelope for media records."""
    items: List[MediaInDB]
    total: int

314
backend/crm/nextcloud.py Normal file
View File

@@ -0,0 +1,314 @@
"""
Nextcloud WebDAV client.
All paths passed to these functions are relative to `settings.nextcloud_base_path`.
The full WebDAV URL is:
{nextcloud_url}/remote.php/dav/files/{username}/{base_path}/{relative_path}
"""
import xml.etree.ElementTree as ET
from typing import List
from urllib.parse import unquote
import httpx
from fastapi import HTTPException
from config import settings
DAV_NS = "DAV:"
# Default timeout for all Nextcloud WebDAV requests (seconds)
_TIMEOUT = 60.0
# Shared async client — reuses TCP connections across requests so Nextcloud
# doesn't see rapid connection bursts that trigger brute-force throttling.
_http_client: httpx.AsyncClient | None = None
def _get_client() -> httpx.AsyncClient:
    """Return the process-wide AsyncClient, lazily (re)created when closed."""
    global _http_client
    client = _http_client
    if client is None or client.is_closed:
        client = httpx.AsyncClient(
            timeout=_TIMEOUT,
            follow_redirects=True,
            headers={"User-Agent": "BellSystems-CP/1.0"},
        )
        _http_client = client
    return client
async def close_client() -> None:
    """Close the shared HTTP client. Call this on application shutdown."""
    global _http_client
    client = _http_client
    if client and not client.is_closed:
        await client.aclose()
        _http_client = None
async def keepalive_ping() -> None:
    """
    Send a lightweight PROPFIND Depth:0 to the Nextcloud base folder to keep
    the TCP connection alive. Safe to call even if Nextcloud is not configured.
    """
    # No-op when Nextcloud is not configured (avoids _base_url raising 503).
    if not settings.nextcloud_url:
        return
    try:
        url = _base_url()
        client = _get_client()
        await client.request(
            "PROPFIND",
            url,
            auth=_auth(),
            headers={"Depth": "0", "Content-Type": "application/xml"},
            content=_PROPFIND_BODY,
        )
    except Exception as e:
        # Best-effort: a failed ping must never propagate to the caller.
        print(f"[NEXTCLOUD KEEPALIVE] ping failed: {e}")
def _dav_user() -> str:
    """The username used in the WebDAV URL path (may differ from the login username)."""
    override = settings.nextcloud_dav_user
    return override if override else settings.nextcloud_username
def _base_url() -> str:
    """Root WebDAV URL for the configured base path; raises 503 if unconfigured."""
    url = settings.nextcloud_url
    if not url:
        raise HTTPException(status_code=503, detail="Nextcloud not configured")
    return "/".join([
        url.rstrip("/"),
        "remote.php/dav/files",
        _dav_user(),
        settings.nextcloud_base_path,
    ])
def _auth() -> tuple[str, str]:
    """(username, password) pair for HTTP Basic auth against Nextcloud."""
    creds = (settings.nextcloud_username, settings.nextcloud_password)
    return creds
def _full_url(relative_path: str) -> str:
    """Build the full WebDAV URL for a path relative to the base folder."""
    root = _base_url()
    trimmed = relative_path.strip("/")
    if not trimmed:
        return root
    return f"{root}/{trimmed}"
def _parse_propfind(xml_bytes: bytes, base_path_prefix: str) -> List[dict]:
    """
    Parse a WebDAV multi-status PROPFIND XML response.

    Returns a list of entry dicts with keys: filename, path (relative to
    nextcloud_base_path), mime_type, size, last_modified, is_dir.
    The root of the PROPFIND request itself is skipped.
    """
    root = ET.fromstring(xml_bytes)
    results = []
    # The prefix we need to strip from D:href to get the relative path back
    # href looks like: /remote.php/dav/files/user/BellSystems/Console/customers/abc/
    dav_prefix = (
        f"/remote.php/dav/files/{_dav_user()}"
        f"/{settings.nextcloud_base_path}/"
    )
    for response in root.findall(f"{{{DAV_NS}}}response"):
        href_el = response.find(f"{{{DAV_NS}}}href")
        if href_el is None:
            continue
        href = unquote(href_el.text or "")
        # Strip DAV prefix to get relative path within base_path
        if href.startswith(dav_prefix):
            rel = href[len(dav_prefix):].rstrip("/")
        else:
            rel = href
        # Skip the folder itself (the root of the PROPFIND request)
        if rel == base_path_prefix.strip("/"):
            continue
        propstat = response.find(f"{{{DAV_NS}}}propstat")
        if propstat is None:
            continue
        prop = propstat.find(f"{{{DAV_NS}}}prop")
        if prop is None:
            continue
        # is_dir: resourcetype contains D:collection
        resource_type = prop.find(f"{{{DAV_NS}}}resourcetype")
        is_dir = resource_type is not None and resource_type.find(f"{{{DAV_NS}}}collection") is not None
        fallback_mime = "inode/directory" if is_dir else "application/octet-stream"
        content_type_el = prop.find(f"{{{DAV_NS}}}getcontenttype")
        # FIX: previously a present-but-empty <getcontenttype/> element produced
        # mime_type=None; now any missing/empty text falls back to a default.
        mime_type = (content_type_el.text if content_type_el is not None else None) or fallback_mime
        size_el = prop.find(f"{{{DAV_NS}}}getcontentlength")
        size = int(size_el.text) if size_el is not None and size_el.text else 0
        modified_el = prop.find(f"{{{DAV_NS}}}getlastmodified")
        last_modified = modified_el.text if modified_el is not None else None
        filename = rel.split("/")[-1] if rel else ""
        results.append({
            "filename": filename,
            "path": rel,
            "mime_type": mime_type,
            "size": size,
            "last_modified": last_modified,
            "is_dir": is_dir,
        })
    return results
async def ensure_folder(relative_path: str) -> None:
    """
    Create a folder (and all parents) in Nextcloud via MKCOL.
    Includes the base_path segments so the full hierarchy is created from scratch.
    Silently succeeds if folders already exist.

    Raises:
        HTTPException(502): if any MKCOL returns an unexpected status code.
    """
    # Build the complete path list: base_path segments + relative_path segments
    base_parts = settings.nextcloud_base_path.strip("/").split("/")
    rel_parts = relative_path.strip("/").split("/") if relative_path.strip("/") else []
    all_parts = base_parts + rel_parts
    dav_root = f"{settings.nextcloud_url.rstrip('/')}/remote.php/dav/files/{_dav_user()}"
    client = _get_client()
    built = ""
    # Issue one MKCOL per path segment, shallowest first, so parents always exist.
    for part in all_parts:
        built = f"{built}/{part}" if built else part
        url = f"{dav_root}/{built}"
        resp = await client.request("MKCOL", url, auth=_auth())
        # 201 = created, 405/409 = already exists — both are fine
        if resp.status_code not in (201, 405, 409):
            raise HTTPException(
                status_code=502,
                detail=f"Failed to create Nextcloud folder '{built}': {resp.status_code}",
            )
async def write_info_file(customer_folder: str, customer_name: str, customer_id: str) -> None:
    """Write a _info.txt stub into a new customer folder for human browsability."""
    body = f"Customer: {customer_name}\nID: {customer_id}\n".encode("utf-8")
    target = f"{customer_folder}/_info.txt"
    await upload_file(target, body, "text/plain")
_PROPFIND_BODY = b"""<?xml version="1.0"?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:resourcetype/>
<D:getcontenttype/>
<D:getcontentlength/>
<D:getlastmodified/>
</D:prop>
</D:propfind>"""
async def list_folder(relative_path: str) -> List[dict]:
    """
    PROPFIND at depth=1 to list a folder's immediate children.
    relative_path is relative to nextcloud_base_path.

    Returns [] when the folder does not exist (404); raises
    HTTPException(502) on any other unexpected status.
    """
    url = _full_url(relative_path)
    client = _get_client()
    resp = await client.request(
        "PROPFIND",
        url,
        auth=_auth(),
        headers={"Depth": "1", "Content-Type": "application/xml"},
        content=_PROPFIND_BODY,
    )
    # A missing folder is not an error for listing purposes.
    if resp.status_code == 404:
        return []
    # 207 Multi-Status is the normal WebDAV success response.
    if resp.status_code not in (207, 200):
        raise HTTPException(status_code=502, detail=f"Nextcloud PROPFIND failed: {resp.status_code}")
    return _parse_propfind(resp.content, relative_path)
async def list_folder_recursive(relative_path: str) -> List[dict]:
    """
    Recursively list ALL files under a folder (any depth).
    Tries Depth:infinity first (single call). Falls back to manual recursion
    via Depth:1 if the server returns 403/400 (some servers disable infinity).
    Returns only file entries (is_dir=False).
    """
    url = _full_url(relative_path)
    client = _get_client()
    resp = await client.request(
        "PROPFIND",
        url,
        auth=_auth(),
        headers={"Depth": "infinity", "Content-Type": "application/xml"},
        content=_PROPFIND_BODY,
    )
    if resp.status_code in (207, 200):
        # Single-call success: filter out folders, keep only files.
        all_items = _parse_propfind(resp.content, relative_path)
        return [item for item in all_items if not item["is_dir"]]
    # Depth:infinity not supported — fall back to recursive Depth:1
    if resp.status_code in (403, 400, 412):
        return await _list_recursive_fallback(relative_path)
    if resp.status_code == 404:
        return []
    raise HTTPException(status_code=502, detail=f"Nextcloud PROPFIND failed: {resp.status_code}")
async def _list_recursive_fallback(relative_path: str) -> List[dict]:
    """Manually recurse via Depth:1 calls when Depth:infinity is blocked."""
    entries = await list_folder(relative_path)
    # Files at this level first, then each subdirectory's files in order.
    collected = [entry for entry in entries if not entry["is_dir"]]
    for entry in entries:
        if entry["is_dir"]:
            collected.extend(await _list_recursive_fallback(entry["path"]))
    return collected
async def upload_file(relative_path: str, content: bytes, mime_type: str) -> str:
    """
    PUT a file to Nextcloud. Returns the relative_path on success.
    relative_path includes filename, e.g. "customers/abc123/media/photo.jpg"
    """
    resp = await _get_client().put(
        _full_url(relative_path),
        auth=_auth(),
        content=content,
        headers={"Content-Type": mime_type},
    )
    if resp.status_code in (200, 201, 204):
        return relative_path
    raise HTTPException(status_code=502, detail=f"Nextcloud upload failed: {resp.status_code}")
async def download_file(relative_path: str) -> tuple[bytes, str]:
    """GET a file from Nextcloud. Returns (bytes, mime_type)."""
    client = _get_client()
    resp = await client.get(_full_url(relative_path), auth=_auth())
    status = resp.status_code
    if status == 404:
        raise HTTPException(status_code=404, detail="File not found in Nextcloud")
    if status != 200:
        raise HTTPException(status_code=502, detail=f"Nextcloud download failed: {status}")
    # Drop any charset suffix (e.g. "text/plain; charset=utf-8").
    raw_type = resp.headers.get("content-type", "application/octet-stream")
    mime = raw_type.split(";")[0].strip()
    return resp.content, mime
async def delete_file(relative_path: str) -> None:
    """DELETE a file from Nextcloud (a 404 counts as success)."""
    client = _get_client()
    resp = await client.request("DELETE", _full_url(relative_path), auth=_auth())
    acceptable = (200, 204, 404)
    if resp.status_code not in acceptable:
        raise HTTPException(status_code=502, detail=f"Nextcloud delete failed: {resp.status_code}")

View File

@@ -0,0 +1,305 @@
"""
Nextcloud WebDAV proxy endpoints.
Folder convention (all paths relative to nextcloud_base_path = BellSystems/Console):
customers/{folder_id}/media/
customers/{folder_id}/documents/
customers/{folder_id}/sent/
customers/{folder_id}/received/
folder_id = customer.folder_id if set, else customer.id (legacy fallback).
"""
from fastapi import APIRouter, Depends, Query, UploadFile, File, Form, Response, HTTPException, Request
from typing import Optional
from jose import JWTError
from auth.models import TokenPayload
from auth.dependencies import require_permission
from auth.utils import decode_access_token
from crm import nextcloud, service
from crm.models import MediaCreate, MediaDirection
router = APIRouter(prefix="/api/crm/nextcloud", tags=["crm-nextcloud"])
# Maps a customer subfolder name to the MediaDirection recorded in crm_media.
# "media" and "documents" both count as internal material.
DIRECTION_MAP = {
    "sent": MediaDirection.sent,
    "received": MediaDirection.received,
    "internal": MediaDirection.internal,
    "media": MediaDirection.internal,
    "documents": MediaDirection.internal,
}
@router.get("/browse")
async def browse(
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List immediate children of a Nextcloud folder.

    Returns {"path": <requested path>, "items": [...]} where each item is a
    file/folder dict produced by the WebDAV PROPFIND parser.
    """
    items = await nextcloud.list_folder(path)
    return {"path": path, "items": items}
@router.get("/browse-all")
async def browse_all(
    customer_id: str = Query(..., description="Customer ID"),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """
    Recursively list ALL files for a customer across all subfolders and any depth.
    Uses Depth:infinity (one WebDAV call) with automatic fallback to recursive Depth:1.
    Each file item includes a 'subfolder' key derived from its path.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"
    all_files = await nextcloud.list_folder_recursive(base)
    # Tag each file with the top-level subfolder it lives under
    for item in all_files:
        parts = item["path"].split("/")
        # path looks like: customers/{nc_path}/{subfolder}/[...]/filename
        # parts[0]=customers, parts[1]={nc_path}, parts[2]={subfolder}
        item["subfolder"] = parts[2] if len(parts) > 2 else "other"
    return {"items": all_files}
@router.get("/file")
async def proxy_file(
    request: Request,
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    token: Optional[str] = Query(None, description="JWT token for browser-native requests (img src, video src, a href) that cannot send an Authorization header"),
):
    """
    Stream a file from Nextcloud through the backend (proxy).
    Supports HTTP Range requests so videos can be seeked and start playing immediately.
    Accepts auth via Authorization: Bearer header OR ?token= query param.
    """
    # FIX: the docstring promised Bearer-header auth, but only ?token= was
    # accepted. Fall back to the Authorization header when the query param is
    # absent so regular API clients can authenticate normally.
    if token is None:
        auth_header = request.headers.get("authorization", "")
        if auth_header.lower().startswith("bearer "):
            token = auth_header[7:].strip()
    if token is None:
        raise HTTPException(status_code=403, detail="Not authenticated")
    try:
        decode_access_token(token)
    except (JWTError, KeyError):
        raise HTTPException(status_code=403, detail="Invalid token")
    content, mime_type = await nextcloud.download_file(path)
    total = len(content)
    range_header = request.headers.get("range")
    if range_header and range_header.startswith("bytes="):
        # Parse "bytes=start-end" (either bound may be omitted).
        try:
            range_spec = range_header[6:]
            start_str, _, end_str = range_spec.partition("-")
            start = int(start_str) if start_str else 0
            end = int(end_str) if end_str else total - 1
            end = min(end, total - 1)
            chunk = content[start:end + 1]
            headers = {
                "Content-Range": f"bytes {start}-{end}/{total}",
                "Accept-Ranges": "bytes",
                "Content-Length": str(len(chunk)),
                "Content-Type": mime_type,
            }
            return Response(content=chunk, status_code=206, headers=headers, media_type=mime_type)
        except (ValueError, IndexError):
            # Malformed Range header — fall through to a full 200 response.
            pass
    return Response(
        content=content,
        media_type=mime_type,
        headers={"Accept-Ranges": "bytes", "Content-Length": str(total)},
    )
@router.put("/file-put")
async def put_file(
    request: Request,
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    token: Optional[str] = Query(None),
):
    """
    Overwrite a file in Nextcloud with a new body (used for TXT in-browser editing).
    Auth via ?token= query param (same pattern as /file GET).

    Returns {"updated": <path>} on success.
    """
    if token is None:
        raise HTTPException(status_code=403, detail="Not authenticated")
    try:
        decode_access_token(token)
    except (JWTError, KeyError):
        raise HTTPException(status_code=403, detail="Invalid token")
    # The raw request body becomes the new file content verbatim.
    body = await request.body()
    content_type = request.headers.get("content-type", "text/plain")
    await nextcloud.upload_file(path, body, content_type)
    return {"updated": path}
@router.post("/upload")
async def upload_file(
    file: UploadFile = File(...),
    customer_id: str = Form(...),
    subfolder: str = Form("media"),  # "media" | "documents" | "sent" | "received"
    direction: Optional[str] = Form(None),
    tags: Optional[str] = Form(None),  # comma-separated tag string
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Upload a file to the customer's Nextcloud folder and record it in crm_media.
    Uses the customer's folder_id as the NC path (falls back to UUID for legacy records).
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    target_folder = f"customers/{nc_path}/{subfolder}"
    file_path = f"{target_folder}/{file.filename}"
    # Ensure the target subfolder exists (idempotent, fast for existing folders)
    await nextcloud.ensure_folder(target_folder)
    # Read and upload
    content = await file.read()
    mime_type = file.content_type or "application/octet-stream"
    await nextcloud.upload_file(file_path, content, mime_type)
    # Resolve direction: explicit form value wins; invalid or missing values
    # fall back to the subfolder-based mapping.
    resolved_direction = None
    if direction:
        try:
            resolved_direction = MediaDirection(direction)
        except ValueError:
            resolved_direction = DIRECTION_MAP.get(subfolder, MediaDirection.internal)
    else:
        resolved_direction = DIRECTION_MAP.get(subfolder, MediaDirection.internal)
    # Save metadata record
    tag_list = [t.strip() for t in tags.split(",")] if tags else []
    media_record = await service.create_media(MediaCreate(
        customer_id=customer_id,
        filename=file.filename,
        nextcloud_path=file_path,
        mime_type=mime_type,
        direction=resolved_direction,
        tags=tag_list,
        uploaded_by=_user.name,
    ))
    return media_record
@router.delete("/file")
async def delete_file(
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a file from Nextcloud and remove the matching crm_media record if found."""
    await nextcloud.delete_file(path)
    # Best-effort: delete the DB record if one matches this path
    # (a failure here must not undo the already-completed NC deletion).
    media_list = await service.list_media()
    for m in media_list:
        if m.nextcloud_path == path:
            try:
                await service.delete_media(m.id)
            except Exception:
                pass
            break
    return {"deleted": path}
@router.post("/init-customer-folder")
async def init_customer_folder(
    customer_id: str = Form(...),
    customer_name: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Create the standard folder structure for a customer in Nextcloud
    and write an _info.txt stub for human readability.

    Returns {"initialized": <base folder path>}.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"
    # Standard per-customer layout; ensure_folder is idempotent.
    for sub in ("media", "documents", "sent", "received"):
        await nextcloud.ensure_folder(f"{base}/{sub}")
    await nextcloud.write_info_file(base, customer_name, customer_id)
    return {"initialized": base}
@router.post("/sync")
async def sync_nextcloud_files(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Scan the customer's Nextcloud folder and register any files not yet tracked in the DB.
    Returns counts of newly synced and skipped (already tracked) files.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"
    # Collect all NC files recursively (handles nested folders at any depth)
    all_nc_files = await nextcloud.list_folder_recursive(base)
    for item in all_nc_files:
        parts = item["path"].split("/")
        # path looks like: customers/{nc_path}/{subfolder}/... — default to "media"
        item["_subfolder"] = parts[2] if len(parts) > 2 else "media"
    # Get existing DB records for this customer
    existing = await service.list_media(customer_id=customer_id)
    tracked_paths = {m.nextcloud_path for m in existing}
    synced = 0
    skipped = 0
    for f in all_nc_files:
        if f["path"] in tracked_paths:
            skipped += 1
            continue
        # Derive direction from the subfolder the file lives in.
        sub = f["_subfolder"]
        direction = DIRECTION_MAP.get(sub, MediaDirection.internal)
        await service.create_media(MediaCreate(
            customer_id=customer_id,
            filename=f["filename"],
            nextcloud_path=f["path"],
            mime_type=f.get("mime_type") or "application/octet-stream",
            direction=direction,
            tags=[],
            uploaded_by="nextcloud-sync",
        ))
        synced += 1
    return {"synced": synced, "skipped": skipped}
@router.post("/untrack-deleted")
async def untrack_deleted_files(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Remove DB records for files that no longer exist in Nextcloud.
    Returns count of untracked records.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"
    # Collect all NC file paths recursively
    all_nc_files = await nextcloud.list_folder_recursive(base)
    nc_paths = {item["path"] for item in all_nc_files}
    # Find DB records whose NC path no longer exists
    existing = await service.list_media(customer_id=customer_id)
    untracked = 0
    for m in existing:
        if m.nextcloud_path and m.nextcloud_path not in nc_paths:
            # Best-effort: a single failed delete should not abort the sweep.
            try:
                await service.delete_media(m.id)
                untracked += 1
            except Exception:
                pass
    return {"untracked": untracked}

View File

@@ -0,0 +1,57 @@
from fastapi import APIRouter, Depends, Query
from typing import Optional
from auth.models import TokenPayload
from auth.dependencies import require_permission
from crm.models import OrderCreate, OrderUpdate, OrderInDB, OrderListResponse
from crm import service
router = APIRouter(prefix="/api/crm/orders", tags=["crm-orders"])
@router.get("", response_model=OrderListResponse)
def list_orders(
    customer_id: Optional[str] = Query(None),
    status: Optional[str] = Query(None),
    payment_status: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List orders, optionally filtered by customer, status and/or payment status."""
    orders = service.list_orders(
        customer_id=customer_id,
        status=status,
        payment_status=payment_status,
    )
    return OrderListResponse(orders=orders, total=len(orders))
@router.get("/{order_id}", response_model=OrderInDB)
def get_order(
    order_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Return a single order by id."""
    return service.get_order(order_id)
@router.post("", response_model=OrderInDB, status_code=201)
def create_order(
    body: OrderCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a new order; returns the stored record with 201 Created."""
    return service.create_order(body)
@router.put("/{order_id}", response_model=OrderInDB)
def update_order(
    order_id: str,
    body: OrderUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Apply a partial update to an order and return the updated record."""
    return service.update_order(order_id, body)
@router.delete("/{order_id}", status_code=204)
def delete_order(
    order_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete an order; responds 204 No Content on success."""
    service.delete_order(order_id)

View File

@@ -0,0 +1,141 @@
from enum import Enum
from typing import Any, Dict, List, Optional
from pydantic import BaseModel
class QuotationStatus(str, Enum):
    """Lifecycle state of a quotation (new quotations start as draft)."""
    draft = "draft"
    sent = "sent"
    accepted = "accepted"
    rejected = "rejected"
class QuotationItemCreate(BaseModel):
    """One quotation line item as submitted by the client."""
    product_id: Optional[str] = None
    description: Optional[str] = None
    unit_type: str = "pcs"  # pcs / kg / m
    unit_cost: float = 0.0
    discount_percent: float = 0.0  # per-line discount, 0–100
    quantity: float = 1.0
    vat_percent: float = 24.0  # per-item VAT rate
    sort_order: int = 0  # display ordering within the quotation
class QuotationItemInDB(QuotationItemCreate):
    """A stored line item: the create fields plus DB identity and computed total."""
    id: str
    quotation_id: str
    line_total: float = 0.0  # unit_cost × quantity after per-line discount
class QuotationCreate(BaseModel):
    """Payload for creating a quotation; totals are computed server-side."""
    customer_id: str
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: str = "en"  # en / gr
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: float = 0.0  # applied to the whole subtotal
    shipping_cost: float = 0.0
    shipping_cost_discount: float = 0.0  # percent discount on shipping
    install_cost: float = 0.0
    install_cost_discount: float = 0.0  # percent discount on installation
    extras_label: Optional[str] = None
    extras_cost: float = 0.0  # added after VAT
    comments: List[str] = []
    quick_notes: Optional[Dict[str, Any]] = None
    items: List[QuotationItemCreate] = []
    # Client override fields (for this quotation only; customer record is not modified)
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
class QuotationUpdate(BaseModel):
    """Partial update payload; every field is optional (None = leave unchanged)."""
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: Optional[str] = None
    status: Optional[QuotationStatus] = None
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: Optional[float] = None
    shipping_cost: Optional[float] = None
    shipping_cost_discount: Optional[float] = None
    install_cost: Optional[float] = None
    install_cost_discount: Optional[float] = None
    extras_label: Optional[str] = None
    extras_cost: Optional[float] = None
    comments: Optional[List[str]] = None
    quick_notes: Optional[Dict[str, Any]] = None
    items: Optional[List[QuotationItemCreate]] = None  # replaces the full item list when set
    # Client override fields
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
class QuotationInDB(BaseModel):
    """Full quotation record, including server-computed totals and line items."""
    id: str
    quotation_number: str  # e.g. "QT-2026-012"
    customer_id: str
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: str = "en"  # en / gr
    status: QuotationStatus = QuotationStatus.draft
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: float = 0.0
    shipping_cost: float = 0.0
    shipping_cost_discount: float = 0.0
    install_cost: float = 0.0
    install_cost_discount: float = 0.0
    extras_label: Optional[str] = None
    extras_cost: float = 0.0
    comments: List[str] = []
    quick_notes: Dict[str, Any] = {}
    # Computed totals (Decimal math server-side, stored rounded to 2dp)
    subtotal_before_discount: float = 0.0
    global_discount_amount: float = 0.0
    new_subtotal: float = 0.0
    vat_amount: float = 0.0
    final_total: float = 0.0
    nextcloud_pdf_path: Optional[str] = None  # set once a PDF has been generated
    nextcloud_pdf_url: Optional[str] = None
    created_at: str
    updated_at: str
    items: List[QuotationItemInDB] = []
    # Client override fields
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
class QuotationListItem(BaseModel):
    """Slim quotation summary used in list views (no line items)."""
    id: str
    quotation_number: str
    title: Optional[str] = None
    customer_id: str
    status: QuotationStatus
    final_total: float
    created_at: str
    updated_at: str
    nextcloud_pdf_url: Optional[str] = None
class QuotationListResponse(BaseModel):
    """Envelope for quotation list endpoints."""
    quotations: List[QuotationListItem]
    total: int  # number of quotations returned
class NextNumberResponse(BaseModel):
    """Preview of the next quotation number (not reserved until creation)."""
    next_number: str

View File

@@ -0,0 +1,101 @@
from fastapi import APIRouter, Depends, Query
from fastapi.responses import StreamingResponse
from typing import Optional
import io
from auth.dependencies import require_permission
from auth.models import TokenPayload
from crm.quotation_models import (
NextNumberResponse,
QuotationCreate,
QuotationInDB,
QuotationListResponse,
QuotationUpdate,
)
from crm import quotations_service as svc
router = APIRouter(prefix="/api/crm/quotations", tags=["crm-quotations"])
# IMPORTANT: Static paths (e.g. /next-number) must be registered BEFORE the
# /{quotation_id} routes — FastAPI matches routes in declaration order, so a
# later static path would otherwise be captured as a quotation id.
@router.get("/next-number", response_model=NextNumberResponse)
async def get_next_number(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Returns the next available quotation number (preview only — does not commit).

    The number is recomputed at creation time, so concurrent previews may collide.
    """
    next_num = await svc.get_next_number()
    return NextNumberResponse(next_number=next_num)
@router.get("/customer/{customer_id}", response_model=QuotationListResponse)
async def list_quotations_for_customer(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List all quotations belonging to one customer (newest first)."""
    quotations = await svc.list_quotations(customer_id)
    return QuotationListResponse(quotations=quotations, total=len(quotations))
@router.get("/{quotation_id}/pdf")
async def proxy_quotation_pdf(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Proxy the quotation PDF from Nextcloud to bypass browser cookie restrictions.

    Streams the PDF inline so browsers render it instead of downloading.
    """
    pdf_bytes = await svc.get_quotation_pdf_bytes(quotation_id)
    return StreamingResponse(
        io.BytesIO(pdf_bytes),
        media_type="application/pdf",
        headers={"Content-Disposition": "inline"},
    )
@router.get("/{quotation_id}", response_model=QuotationInDB)
async def get_quotation(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Return one quotation with its line items."""
    return await svc.get_quotation(quotation_id)
@router.post("", response_model=QuotationInDB, status_code=201)
async def create_quotation(
    body: QuotationCreate,
    generate_pdf: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Create a quotation. Pass ?generate_pdf=true to immediately generate and upload the PDF.
    """
    return await svc.create_quotation(body, generate_pdf=generate_pdf)
@router.put("/{quotation_id}", response_model=QuotationInDB)
async def update_quotation(
    quotation_id: str,
    body: QuotationUpdate,
    generate_pdf: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Update a quotation. Pass ?generate_pdf=true to regenerate the PDF.
    """
    return await svc.update_quotation(quotation_id, body, generate_pdf=generate_pdf)
@router.delete("/{quotation_id}", status_code=204)
async def delete_quotation(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a quotation; responds 204 No Content on success."""
    await svc.delete_quotation(quotation_id)
@router.post("/{quotation_id}/regenerate-pdf", response_model=QuotationInDB)
async def regenerate_pdf(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Force PDF regeneration and re-upload to Nextcloud."""
    return await svc.regenerate_pdf(quotation_id)

View File

@@ -0,0 +1,494 @@
import json
import logging
import os
import uuid
from datetime import datetime
from decimal import Decimal, ROUND_HALF_UP
from pathlib import Path
from typing import Optional
from fastapi import HTTPException
from crm import nextcloud
from crm.quotation_models import (
QuotationCreate,
QuotationInDB,
QuotationItemCreate,
QuotationItemInDB,
QuotationListItem,
QuotationUpdate,
)
from crm.service import get_customer
from mqtt import database as mqtt_db
# Module-level logger for the quotations service.
logger = logging.getLogger(__name__)
# Path to Jinja2 templates directory (relative to this file)
_TEMPLATES_DIR = Path(__file__).parent.parent / "templates"
# ── Helpers ───────────────────────────────────────────────────────────────────
def _d(value) -> Decimal:
"""Convert to Decimal safely."""
return Decimal(str(value if value is not None else 0))
def _float(d: Decimal) -> float:
"""Round Decimal to 2dp and return as float for storage."""
return float(d.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))
def _calculate_totals(
    items: list,
    global_discount_percent: float,
    shipping_cost: float,
    shipping_cost_discount: float,
    install_cost: float,
    install_cost_discount: float,
    extras_cost: float,
) -> dict:
    """
    Calculate all monetary totals using Decimal arithmetic (ROUND_HALF_UP).
    VAT is computed per-item from each item's vat_percent field.
    Shipping and install costs carry 0% VAT.
    Extras are added after VAT.
    Returns a dict of floats ready for DB storage.
    """
    # Per-line totals and per-item VAT
    item_totals = []
    item_vat = Decimal(0)
    for item in items:
        cost = _d(item.get("unit_cost", 0))
        qty = _d(item.get("quantity", 1))
        disc = _d(item.get("discount_percent", 0))
        # Line net = cost × quantity reduced by the per-line discount.
        net = cost * qty * (1 - disc / 100)
        item_totals.append(net)
        vat_pct = _d(item.get("vat_percent", 24))
        item_vat += net * (vat_pct / 100)
    # Shipping net (VAT = 0%)
    ship_gross = _d(shipping_cost)
    ship_disc = _d(shipping_cost_discount)
    ship_net = ship_gross * (1 - ship_disc / 100)
    # Install net (VAT = 0%)
    install_gross = _d(install_cost)
    install_disc = _d(install_cost_discount)
    install_net = install_gross * (1 - install_disc / 100)
    subtotal = sum(item_totals, Decimal(0)) + ship_net + install_net
    global_disc_pct = _d(global_discount_percent)
    global_disc_amount = subtotal * (global_disc_pct / 100)
    new_subtotal = subtotal - global_disc_amount
    # Global discount proportionally reduces VAT too
    if subtotal > 0:
        disc_ratio = new_subtotal / subtotal
        vat_amount = item_vat * disc_ratio
    else:
        vat_amount = Decimal(0)
    extras = _d(extras_cost)
    final_total = new_subtotal + vat_amount + extras
    # All rounding happens once, at the end, via _float (2dp, HALF_UP).
    return {
        "subtotal_before_discount": _float(subtotal),
        "global_discount_amount": _float(global_disc_amount),
        "new_subtotal": _float(new_subtotal),
        "vat_amount": _float(vat_amount),
        "final_total": _float(final_total),
    }
def _calc_line_total(item) -> float:
    """Net line total: unit_cost × quantity, reduced by discount_percent, 2dp."""
    unit = _d(item.get("unit_cost", 0))
    quantity = _d(item.get("quantity", 1))
    discount = _d(item.get("discount_percent", 0))
    factor = 1 - discount / 100
    return _float(unit * quantity * factor)
async def _generate_quotation_number(db) -> str:
    """
    Generate the next sequential quotation number for the current year,
    formatted as "QT-{year}-{seq:03d}" (e.g. "QT-2026-013").

    FIX: the previous implementation used ORDER BY quotation_number DESC
    LIMIT 1, a lexicographic sort that would pick "QT-2026-999" over
    "QT-2026-1000" once the sequence exceeds three digits. We now parse every
    existing number for the year and take the numeric maximum.
    """
    year = datetime.utcnow().year
    prefix = f"QT-{year}-"
    rows = await db.execute_fetchall(
        "SELECT quotation_number FROM crm_quotations WHERE quotation_number LIKE ?",
        (f"{prefix}%",),
    )
    max_seq = 0
    for row in rows:
        suffix = row[0][len(prefix):]  # e.g. "012"
        try:
            max_seq = max(max_seq, int(suffix))
        except ValueError:
            # Ignore malformed numbers rather than breaking quotation creation.
            continue
    return f"{prefix}{max_seq + 1:03d}"
def _row_to_quotation(row: dict, items: list[dict]) -> QuotationInDB:
    """Hydrate a DB row (plus its item rows) into a QuotationInDB model.

    JSON columns (comments, quick_notes) are decoded here; unknown columns are
    dropped by filtering against each model's declared fields.
    """
    row = dict(row)
    row["comments"] = json.loads(row.get("comments") or "[]")
    row["quick_notes"] = json.loads(row.get("quick_notes") or "{}")
    item_models = [QuotationItemInDB(**{k: v for k, v in i.items() if k in QuotationItemInDB.model_fields}) for i in items]
    return QuotationInDB(**{k: v for k, v in row.items() if k in QuotationInDB.model_fields}, items=item_models)
def _row_to_list_item(row: dict) -> QuotationListItem:
    """Hydrate a DB row into the slim list model, dropping unknown columns."""
    return QuotationListItem(**{k: v for k, v in dict(row).items() if k in QuotationListItem.model_fields})
async def _fetch_items(db, quotation_id: str) -> list[dict]:
    """Load a quotation's line items as dicts, ordered by sort_order."""
    query = (
        "SELECT * FROM crm_quotation_items "
        "WHERE quotation_id = ? ORDER BY sort_order ASC"
    )
    rows = await db.execute_fetchall(query, (quotation_id,))
    return [dict(row) for row in rows]
# ── Public API ────────────────────────────────────────────────────────────────
async def get_next_number() -> str:
    """Preview the next quotation number without reserving it."""
    db = await mqtt_db.get_db()
    return await _generate_quotation_number(db)
async def list_quotations(customer_id: str) -> list[QuotationListItem]:
    """Return slim quotation summaries for a customer, newest first."""
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT id, quotation_number, title, customer_id, status, final_total, created_at, updated_at, nextcloud_pdf_url "
        "FROM crm_quotations WHERE customer_id = ? ORDER BY created_at DESC",
        (customer_id,),
    )
    return [_row_to_list_item(dict(r)) for r in rows]
async def get_quotation(quotation_id: str) -> QuotationInDB:
    """Load a full quotation with its line items.

    Raises:
        HTTPException(404): if no quotation exists with the given id.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")
    items = await _fetch_items(db, quotation_id)
    return _row_to_quotation(dict(rows[0]), items)
async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) -> QuotationInDB:
    """Create a quotation (header + item rows), compute totals, optionally render a PDF.

    Args:
        data: validated payload; item order is preserved via sort_order.
        generate_pdf: when True, render and upload the PDF before returning.

    Returns:
        The freshly persisted quotation, re-read from the database.
    """
    db = await mqtt_db.get_db()
    now = datetime.utcnow().isoformat()
    qid = str(uuid.uuid4())
    # Number is allocated here; status is hard-coded to 'draft' in the INSERT.
    quotation_number = await _generate_quotation_number(db)
    # Build items list for calculation
    items_raw = [item.model_dump() for item in data.items]
    # Calculate per-item line totals
    for item in items_raw:
        item["line_total"] = _calc_line_total(item)
    totals = _calculate_totals(
        items_raw,
        data.global_discount_percent,
        data.shipping_cost,
        data.shipping_cost_discount,
        data.install_cost,
        data.install_cost_discount,
        data.extras_cost,
    )
    # Free-form fields are persisted as JSON text columns.
    comments_json = json.dumps(data.comments)
    quick_notes_json = json.dumps(data.quick_notes or {})
    await db.execute(
        """INSERT INTO crm_quotations (
            id, quotation_number, title, subtitle, customer_id,
            language, status, order_type, shipping_method, estimated_shipping_date,
            global_discount_label, global_discount_percent,
            shipping_cost, shipping_cost_discount, install_cost, install_cost_discount,
            extras_label, extras_cost, comments, quick_notes,
            subtotal_before_discount, global_discount_amount, new_subtotal, vat_amount, final_total,
            nextcloud_pdf_path, nextcloud_pdf_url,
            client_org, client_name, client_location, client_phone, client_email,
            created_at, updated_at
        ) VALUES (
            ?, ?, ?, ?, ?,
            ?, 'draft', ?, ?, ?,
            ?, ?,
            ?, ?, ?, ?,
            ?, ?, ?, ?,
            ?, ?, ?, ?, ?,
            NULL, NULL,
            ?, ?, ?, ?, ?,
            ?, ?
        )""",
        (
            qid, quotation_number, data.title, data.subtitle, data.customer_id,
            data.language, data.order_type, data.shipping_method, data.estimated_shipping_date,
            data.global_discount_label, data.global_discount_percent,
            data.shipping_cost, data.shipping_cost_discount, data.install_cost, data.install_cost_discount,
            data.extras_label, data.extras_cost, comments_json, quick_notes_json,
            totals["subtotal_before_discount"], totals["global_discount_amount"],
            totals["new_subtotal"], totals["vat_amount"], totals["final_total"],
            data.client_org, data.client_name, data.client_location, data.client_phone, data.client_email,
            now, now,
        ),
    )
    # Insert items; sort_order falls back to the list position.
    for i, item in enumerate(items_raw):
        item_id = str(uuid.uuid4())
        await db.execute(
            """INSERT INTO crm_quotation_items
               (id, quotation_id, product_id, description, unit_type, unit_cost,
                discount_percent, quantity, vat_percent, line_total, sort_order)
               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            (
                item_id, qid, item.get("product_id"), item.get("description"),
                item.get("unit_type", "pcs"), item.get("unit_cost", 0),
                item.get("discount_percent", 0), item.get("quantity", 1),
                item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
            ),
        )
    await db.commit()
    quotation = await get_quotation(qid)
    if generate_pdf:
        quotation = await _do_generate_and_upload_pdf(quotation)
    return quotation
async def update_quotation(quotation_id: str, data: QuotationUpdate, generate_pdf: bool = False) -> QuotationInDB:
    """Partially update a quotation, recompute totals, optionally regenerate its PDF.

    Only fields present (non-None) in ``data`` are written; totals are always
    recomputed from the merged scalar values and the (new or existing) items.

    Raises:
        HTTPException 404: quotation does not exist.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")
    existing = dict(rows[0])
    now = datetime.utcnow().isoformat()
    # Merge update into existing values
    update_fields = data.model_dump(exclude_none=True)
    # Build SET clause — handle comments JSON separately
    set_parts = []
    params = []
    scalar_fields = [
        "title", "subtitle", "language", "status", "order_type", "shipping_method",
        "estimated_shipping_date", "global_discount_label", "global_discount_percent",
        "shipping_cost", "shipping_cost_discount", "install_cost",
        "install_cost_discount", "extras_label", "extras_cost",
        "client_org", "client_name", "client_location", "client_phone", "client_email",
    ]
    for field in scalar_fields:
        if field in update_fields:
            set_parts.append(f"{field} = ?")
            params.append(update_fields[field])
    if "comments" in update_fields:
        set_parts.append("comments = ?")
        params.append(json.dumps(update_fields["comments"]))
    if "quick_notes" in update_fields:
        set_parts.append("quick_notes = ?")
        params.append(json.dumps(update_fields["quick_notes"] or {}))
    # Recalculate totals using merged values
    merged = {**existing, **{k: update_fields.get(k, existing.get(k)) for k in scalar_fields}}
    # If items are being updated, recalculate with new items; otherwise use existing items
    if "items" in update_fields:
        items_raw = [item.model_dump() for item in data.items]
        for item in items_raw:
            item["line_total"] = _calc_line_total(item)
    else:
        existing_items = await _fetch_items(db, quotation_id)
        items_raw = existing_items
    # NOTE(review): float() assumes these merged columns are never NULL in the
    # DB — confirm the schema declares defaults for the cost/discount columns.
    totals = _calculate_totals(
        items_raw,
        float(merged.get("global_discount_percent", 0)),
        float(merged.get("shipping_cost", 0)),
        float(merged.get("shipping_cost_discount", 0)),
        float(merged.get("install_cost", 0)),
        float(merged.get("install_cost_discount", 0)),
        float(merged.get("extras_cost", 0)),
    )
    for field, val in totals.items():
        set_parts.append(f"{field} = ?")
        params.append(val)
    set_parts.append("updated_at = ?")
    params.append(now)
    params.append(quotation_id)
    if set_parts:
        await db.execute(
            f"UPDATE crm_quotations SET {', '.join(set_parts)} WHERE id = ?",
            params,
        )
    # Replace items if provided (delete-all then re-insert in list order)
    if "items" in update_fields:
        await db.execute("DELETE FROM crm_quotation_items WHERE quotation_id = ?", (quotation_id,))
        for i, item in enumerate(items_raw):
            item_id = str(uuid.uuid4())
            await db.execute(
                """INSERT INTO crm_quotation_items
                   (id, quotation_id, product_id, description, unit_type, unit_cost,
                    discount_percent, quantity, vat_percent, line_total, sort_order)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    item_id, quotation_id, item.get("product_id"), item.get("description"),
                    item.get("unit_type", "pcs"), item.get("unit_cost", 0),
                    item.get("discount_percent", 0), item.get("quantity", 1),
                    item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
                ),
            )
    await db.commit()
    quotation = await get_quotation(quotation_id)
    if generate_pdf:
        quotation = await _do_generate_and_upload_pdf(quotation)
    return quotation
async def delete_quotation(quotation_id: str) -> None:
    """Delete a quotation, its item rows, and (best-effort) its Nextcloud PDF.

    Raises:
        HTTPException 404: quotation does not exist.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT nextcloud_pdf_path FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")
    pdf_path = dict(rows[0]).get("nextcloud_pdf_path")
    await db.execute("DELETE FROM crm_quotation_items WHERE quotation_id = ?", (quotation_id,))
    await db.execute("DELETE FROM crm_quotations WHERE id = ?", (quotation_id,))
    await db.commit()
    # Remove PDF from Nextcloud (best-effort): the DB rows are already gone,
    # so a failed remote delete only leaves an orphaned file, not bad state.
    if pdf_path:
        try:
            await nextcloud.delete_file(pdf_path)
        except Exception as e:
            logger.warning("Failed to delete PDF from Nextcloud (%s): %s", pdf_path, e)
# ── PDF Generation ─────────────────────────────────────────────────────────────
async def _do_generate_and_upload_pdf(quotation: QuotationInDB) -> QuotationInDB:
    """Generate PDF, upload to Nextcloud, update DB record. Returns updated quotation.

    Every failure path logs and returns the quotation unchanged — PDF
    generation is intentionally best-effort and never raises to the caller.
    """
    try:
        customer = get_customer(quotation.customer_id)
    except Exception as e:
        logger.error("Cannot generate PDF — customer not found: %s", e)
        return quotation
    try:
        pdf_bytes = await _generate_pdf_bytes(quotation, customer)
    except Exception as e:
        logger.error("PDF generation failed for quotation %s: %s", quotation.id, e)
        return quotation
    # Delete old PDF if present (ignore errors — the upload below overwrites
    # the DB pointers anyway).
    if quotation.nextcloud_pdf_path:
        try:
            await nextcloud.delete_file(quotation.nextcloud_pdf_path)
        except Exception:
            pass
    try:
        pdf_path, pdf_url = await _upload_pdf(customer, quotation, pdf_bytes)
    except Exception as e:
        logger.error("PDF upload failed for quotation %s: %s", quotation.id, e)
        return quotation
    # Persist paths
    db = await mqtt_db.get_db()
    await db.execute(
        "UPDATE crm_quotations SET nextcloud_pdf_path = ?, nextcloud_pdf_url = ? WHERE id = ?",
        (pdf_path, pdf_url, quotation.id),
    )
    await db.commit()
    return await get_quotation(quotation.id)
async def _generate_pdf_bytes(quotation: QuotationInDB, customer) -> bytes:
    """Render the Jinja2 quotation template and convert it to PDF via WeasyPrint.

    Heavy imports are deferred so the module loads even where WeasyPrint's
    native dependencies are absent.
    """
    from jinja2 import Environment, FileSystemLoader, select_autoescape
    import weasyprint

    env = Environment(
        loader=FileSystemLoader(str(_TEMPLATES_DIR)),
        autoescape=select_autoescape(["html"]),
    )

    def format_money(value):
        """Greek-style format: dot thousands separator, comma decimals."""
        try:
            f = float(value)
        except (TypeError, ValueError):
            # Bug fix: the fallback previously returned "0,00 €" while the
            # success path emitted no currency symbol; keep both consistent
            # and let the template decide how to render the symbol.
            return "0,00"
        return f"{f:,.2f}".replace(",", "X").replace(".", ",").replace("X", ".")

    env.filters["format_money"] = format_money
    template = env.get_template("quotation.html")
    html_str = template.render(
        quotation=quotation,
        customer=customer,
        lang=quotation.language,
    )
    return weasyprint.HTML(string=html_str, base_url=str(_TEMPLATES_DIR)).write_pdf()
async def _upload_pdf(customer, quotation: QuotationInDB, pdf_bytes: bytes) -> tuple[str, str]:
    """Upload PDF to Nextcloud, return (relative_path, public_url)."""
    from crm.service import get_customer_nc_path
    from config import settings
    nc_folder = get_customer_nc_path(customer)
    date_str = datetime.utcnow().strftime("%Y-%m-%d")
    filename = f"Quotation-{quotation.quotation_number}-{date_str}.pdf"
    # Bug fix: the path previously interpolated a literal placeholder instead
    # of the generated filename (which was built but never used), so every
    # upload targeted the same broken path.
    rel_path = f"customers/{nc_folder}/quotations/{filename}"
    await nextcloud.ensure_folder(f"customers/{nc_folder}/quotations")
    await nextcloud.upload_file(rel_path, pdf_bytes, "application/pdf")
    # Construct a direct WebDAV download URL
    from crm.nextcloud import _full_url
    pdf_url = _full_url(rel_path)
    return rel_path, pdf_url
async def regenerate_pdf(quotation_id: str) -> QuotationInDB:
    """Re-render and re-upload the PDF for an existing quotation (404 if missing)."""
    quotation = await get_quotation(quotation_id)
    return await _do_generate_and_upload_pdf(quotation)
async def get_quotation_pdf_bytes(quotation_id: str) -> bytes:
    """Download the PDF for a quotation from Nextcloud and return raw bytes.

    Raises:
        HTTPException 404: quotation missing, or no PDF was ever generated.
    """
    # NOTE(review): HTTPException is already used at module scope above, so
    # this local import is redundant — harmless, kept as-is.
    from fastapi import HTTPException
    quotation = await get_quotation(quotation_id)
    if not quotation.nextcloud_pdf_path:
        raise HTTPException(status_code=404, detail="No PDF generated for this quotation")
    pdf_bytes, _ = await nextcloud.download_file(quotation.nextcloud_pdf_path)
    return pdf_bytes

93
backend/crm/router.py Normal file
View File

@@ -0,0 +1,93 @@
from fastapi import APIRouter, Depends, Query, UploadFile, File, HTTPException
from fastapi.responses import FileResponse
from typing import Optional
import os
import shutil
from auth.models import TokenPayload
from auth.dependencies import require_permission
from crm.models import ProductCreate, ProductUpdate, ProductInDB, ProductListResponse
from crm import service
# All product endpoints are mounted under /api/crm/products.
router = APIRouter(prefix="/api/crm/products", tags=["crm-products"])
# Product photos are stored on local disk alongside the backend package.
PHOTO_DIR = os.path.join(os.path.dirname(__file__), "..", "storage", "product_images")
os.makedirs(PHOTO_DIR, exist_ok=True)
@router.get("", response_model=ProductListResponse)
def list_products(
    search: Optional[str] = Query(None),
    category: Optional[str] = Query(None),
    active_only: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List products with optional search/category/active filters (crm:view)."""
    products = service.list_products(search=search, category=category, active_only=active_only)
    return ProductListResponse(products=products, total=len(products))
@router.get("/{product_id}", response_model=ProductInDB)
def get_product(
    product_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch one product by id (crm:view); service raises NotFoundError for bad ids."""
    return service.get_product(product_id)
@router.post("", response_model=ProductInDB, status_code=201)
def create_product(
    body: ProductCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a product (crm:edit)."""
    return service.create_product(body)
@router.put("/{product_id}", response_model=ProductInDB)
def update_product(
    product_id: str,
    body: ProductUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Partially update a product (crm:edit); only non-None fields are applied."""
    return service.update_product(product_id, body)
@router.delete("/{product_id}", status_code=204)
def delete_product(
    product_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a product (crm:edit); returns 204 with no body."""
    service.delete_product(product_id)
@router.post("/{product_id}/photo", response_model=ProductInDB)
async def upload_product_photo(
    product_id: str,
    file: UploadFile = File(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Upload a product photo. Accepts JPG, PNG, or WebP, stored on disk.

    Raises:
        HTTPException 400: unsupported content type.
    """
    if file.content_type not in ("image/jpeg", "image/png", "image/webp"):
        raise HTTPException(status_code=400, detail="Only JPG, PNG, or WebP images are accepted.")
    # Robustness fix: verify the product exists BEFORE touching the
    # filesystem, so a bad id 404s without leaving an orphaned image file.
    service.get_product(product_id)
    ext = {"image/jpeg": "jpg", "image/png": "png", "image/webp": "webp"}.get(file.content_type, "jpg")
    photo_path = os.path.join(PHOTO_DIR, f"{product_id}.{ext}")
    # Remove any old photo files for this product (a re-upload may change extension)
    for old_ext in ("jpg", "png", "webp"):
        old_path = os.path.join(PHOTO_DIR, f"{product_id}.{old_ext}")
        if os.path.exists(old_path) and old_path != photo_path:
            os.remove(old_path)
    with open(photo_path, "wb") as f:
        shutil.copyfileobj(file.file, f)
    # NOTE(review): this URL lacks the router's "/api" prefix even though the
    # serving route lives at /api/crm/products/{id}/photo — confirm the
    # frontend prepends /api before changing this string.
    photo_url = f"/crm/products/{product_id}/photo"
    return service.update_product(product_id, ProductUpdate(photo_url=photo_url))
@router.get("/{product_id}/photo")
def get_product_photo(
    product_id: str,
):
    """Serve a product photo from disk, trying each supported extension in turn."""
    candidates = (
        os.path.join(PHOTO_DIR, f"{product_id}.{ext}")
        for ext in ("jpg", "png", "webp")
    )
    for candidate in candidates:
        if os.path.exists(candidate):
            return FileResponse(candidate)
    raise HTTPException(status_code=404, detail="No photo found for this product.")

619
backend/crm/service.py Normal file
View File

@@ -0,0 +1,619 @@
import json
import uuid
from datetime import datetime
from fastapi import HTTPException
from shared.firebase import get_db
from shared.exceptions import NotFoundError
import re as _re
from mqtt import database as mqtt_db
from crm.models import (
ProductCreate, ProductUpdate, ProductInDB,
CustomerCreate, CustomerUpdate, CustomerInDB,
OrderCreate, OrderUpdate, OrderInDB,
CommCreate, CommUpdate, CommInDB,
MediaCreate, MediaInDB,
)
# Firestore collection holding product documents.
COLLECTION = "crm_products"
def _doc_to_product(doc) -> ProductInDB:
    """Convert a Firestore document snapshot into a ProductInDB model."""
    return ProductInDB(id=doc.id, **doc.to_dict())
def list_products(
    search: str | None = None,
    category: str | None = None,
    active_only: bool = False,
) -> list[ProductInDB]:
    """List products; category/active filter in Firestore, text search in Python.

    The search term is matched case-insensitively against name, SKU, and
    description.
    """
    db = get_db()
    query = db.collection(COLLECTION)
    if active_only:
        query = query.where("active", "==", True)
    if category:
        query = query.where("category", "==", category)
    needle = search.lower() if search else None
    matches: list[ProductInDB] = []
    for doc in query.stream():
        product = _doc_to_product(doc)
        if needle is not None:
            haystacks = (product.name, product.sku, product.description)
            if not any(needle in (text or "").lower() for text in haystacks):
                continue
        matches.append(product)
    return matches
def get_product(product_id: str) -> ProductInDB:
    """Fetch a single product by id; raise NotFoundError if absent."""
    snapshot = get_db().collection(COLLECTION).document(product_id).get()
    if snapshot.exists:
        return _doc_to_product(snapshot)
    raise NotFoundError("Product")
def create_product(data: ProductCreate) -> ProductInDB:
    """Create a product document with server-side timestamps and a UUID id."""
    db = get_db()
    now = datetime.utcnow().isoformat()
    product_id = str(uuid.uuid4())
    doc_data = data.model_dump()
    doc_data["created_at"] = now
    doc_data["updated_at"] = now
    # Serialize nested enums/models.
    # NOTE(review): model_dump() already converts nested BaseModels to dicts,
    # so the costs/stock hasattr guards are likely no-ops — harmless, kept.
    if doc_data.get("category"):
        doc_data["category"] = doc_data["category"].value if hasattr(doc_data["category"], "value") else doc_data["category"]
    if doc_data.get("costs") and hasattr(doc_data["costs"], "model_dump"):
        doc_data["costs"] = doc_data["costs"].model_dump()
    if doc_data.get("stock") and hasattr(doc_data["stock"], "model_dump"):
        doc_data["stock"] = doc_data["stock"].model_dump()
    db.collection(COLLECTION).document(product_id).set(doc_data)
    return ProductInDB(id=product_id, **doc_data)
def update_product(product_id: str, data: ProductUpdate) -> ProductInDB:
    """Apply a partial update to a product and return the refreshed record.

    Raises:
        NotFoundError: product does not exist.
    """
    db = get_db()
    doc_ref = db.collection(COLLECTION).document(product_id)
    doc = doc_ref.get()
    if not doc.exists:
        raise NotFoundError("Product")
    # exclude_none means fields cannot be cleared to null via this endpoint.
    update_data = data.model_dump(exclude_none=True)
    update_data["updated_at"] = datetime.utcnow().isoformat()
    if "category" in update_data and hasattr(update_data["category"], "value"):
        update_data["category"] = update_data["category"].value
    if "costs" in update_data and hasattr(update_data["costs"], "model_dump"):
        update_data["costs"] = update_data["costs"].model_dump()
    if "stock" in update_data and hasattr(update_data["stock"], "model_dump"):
        update_data["stock"] = update_data["stock"].model_dump()
    doc_ref.update(update_data)
    updated_doc = doc_ref.get()
    return _doc_to_product(updated_doc)
def delete_product(product_id: str) -> None:
    """Delete a product by id; raise NotFoundError if it does not exist."""
    ref = get_db().collection(COLLECTION).document(product_id)
    if not ref.get().exists:
        raise NotFoundError("Product")
    ref.delete()
# ── Customers ────────────────────────────────────────────────────────────────
# Firestore collection holding customer documents.
CUSTOMERS_COLLECTION = "crm_customers"
def _doc_to_customer(doc) -> CustomerInDB:
    """Convert a Firestore document snapshot into a CustomerInDB model."""
    return CustomerInDB(id=doc.id, **doc.to_dict())
def list_customers(
    search: str | None = None,
    tag: str | None = None,
) -> list[CustomerInDB]:
    """List customers; tag filter in Firestore, free-text search in Python.

    The search term is matched case-insensitively against name, surname,
    organization, contact values, location (city/country/region), and tags.
    """
    db = get_db()
    query = db.collection(CUSTOMERS_COLLECTION)
    if tag:
        query = query.where("tags", "array_contains", tag)
    results = []
    for doc in query.stream():
        customer = _doc_to_customer(doc)
        if search:
            s = search.lower()
            name_match = s in (customer.name or "").lower()
            surname_match = s in (customer.surname or "").lower()
            org_match = s in (customer.organization or "").lower()
            contact_match = any(
                s in (c.value or "").lower()
                for c in (customer.contacts or [])
            )
            loc = customer.location or {}
            loc_match = (
                s in (loc.get("city", "") or "").lower() or
                s in (loc.get("country", "") or "").lower() or
                s in (loc.get("region", "") or "").lower()
            )
            tag_match = any(s in (t or "").lower() for t in (customer.tags or []))
            if not (name_match or surname_match or org_match or contact_match or loc_match or tag_match):
                continue
        results.append(customer)
    return results
def get_customer(customer_id: str) -> CustomerInDB:
    """Fetch a single customer by id; raise NotFoundError if absent."""
    snapshot = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id).get()
    if snapshot.exists:
        return _doc_to_customer(snapshot)
    raise NotFoundError("Customer")
def get_customer_nc_path(customer: CustomerInDB) -> str:
    """Return the Nextcloud folder slug for a customer. Falls back to UUID for legacy records."""
    return customer.folder_id or customer.id
def create_customer(data: CustomerCreate) -> CustomerInDB:
    """Create a customer after validating and reserving its unique folder_id.

    Raises:
        HTTPException 422: missing or malformed folder_id.
        HTTPException 409: folder_id already taken by another customer.
    """
    db = get_db()
    # Validate folder_id (used as the customer's Nextcloud folder slug)
    if not data.folder_id or not data.folder_id.strip():
        raise HTTPException(status_code=422, detail="Internal Folder ID is required.")
    folder_id = data.folder_id.strip().lower()
    # Bug fix: the previous pattern required at least two characters, rejecting
    # valid single-character ids even though the error message only forbids
    # leading/trailing hyphens; the optional group below allows length 1.
    if not _re.match(r'^[a-z0-9](?:[a-z0-9\-]*[a-z0-9])?$', folder_id):
        raise HTTPException(
            status_code=422,
            detail="Internal Folder ID must contain only lowercase letters, numbers, and hyphens, and cannot start or end with a hyphen.",
        )
    # Check uniqueness
    existing = list(db.collection(CUSTOMERS_COLLECTION).where("folder_id", "==", folder_id).limit(1).stream())
    if existing:
        raise HTTPException(status_code=409, detail=f"A customer with folder ID '{folder_id}' already exists.")
    now = datetime.utcnow().isoformat()
    customer_id = str(uuid.uuid4())
    doc_data = data.model_dump()
    doc_data["folder_id"] = folder_id
    doc_data["created_at"] = now
    doc_data["updated_at"] = now
    db.collection(CUSTOMERS_COLLECTION).document(customer_id).set(doc_data)
    return CustomerInDB(id=customer_id, **doc_data)
def update_customer(customer_id: str, data: CustomerUpdate) -> CustomerInDB:
    """Apply a partial update to a customer and return the refreshed record."""
    ref = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id)
    if not ref.get().exists:
        raise NotFoundError("Customer")
    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()
    ref.update(changes)
    return _doc_to_customer(ref.get())
def delete_customer(customer_id: str) -> None:
    """Delete a customer by id; raise NotFoundError if it does not exist."""
    ref = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id)
    if not ref.get().exists:
        raise NotFoundError("Customer")
    ref.delete()
# ── Orders ───────────────────────────────────────────────────────────────────
# Firestore collection holding order documents.
ORDERS_COLLECTION = "crm_orders"
def _doc_to_order(doc) -> OrderInDB:
    """Convert a Firestore document snapshot into an OrderInDB model."""
    return OrderInDB(id=doc.id, **doc.to_dict())
def _generate_order_number(db) -> str:
    """Return the next sequential order number for the current year, e.g. "ORD-2026-004".

    Scans all order documents for numbers under this year's prefix and
    increments the highest sequence found (zero-padded to three digits).
    """
    prefix = f"ORD-{datetime.utcnow().year}-"
    highest = 0
    for doc in db.collection(ORDERS_COLLECTION).stream():
        number = doc.to_dict().get("order_number", "")
        if not (number and number.startswith(prefix)):
            continue
        try:
            highest = max(highest, int(number[len(prefix):]))
        except ValueError:
            # Ignore malformed suffixes rather than failing the allocation.
            continue
    return f"{prefix}{highest + 1:03d}"
def list_orders(
    customer_id: str | None = None,
    status: str | None = None,
    payment_status: str | None = None,
) -> list[OrderInDB]:
    """List orders, optionally filtered by customer, status, and payment status."""
    query = get_db().collection(ORDERS_COLLECTION)
    filters = (
        ("customer_id", customer_id),
        ("status", status),
        ("payment_status", payment_status),
    )
    for field, value in filters:
        if value:
            query = query.where(field, "==", value)
    return [_doc_to_order(doc) for doc in query.stream()]
def get_order(order_id: str) -> OrderInDB:
    """Fetch a single order by id; raise NotFoundError if absent."""
    snapshot = get_db().collection(ORDERS_COLLECTION).document(order_id).get()
    if snapshot.exists:
        return _doc_to_order(snapshot)
    raise NotFoundError("Order")
def create_order(data: OrderCreate) -> OrderInDB:
    """Create an order document, allocating a sequential number when absent."""
    db = get_db()
    now = datetime.utcnow().isoformat()
    order_id = str(uuid.uuid4())
    doc_data = data.model_dump()
    # A caller-supplied order_number is kept; otherwise one is generated.
    if not doc_data.get("order_number"):
        doc_data["order_number"] = _generate_order_number(db)
    doc_data["created_at"] = now
    doc_data["updated_at"] = now
    db.collection(ORDERS_COLLECTION).document(order_id).set(doc_data)
    return OrderInDB(id=order_id, **doc_data)
def update_order(order_id: str, data: OrderUpdate) -> OrderInDB:
    """Apply a partial update to an order and return the refreshed record."""
    ref = get_db().collection(ORDERS_COLLECTION).document(order_id)
    if not ref.get().exists:
        raise NotFoundError("Order")
    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()
    ref.update(changes)
    return _doc_to_order(ref.get())
def delete_order(order_id: str) -> None:
    """Delete an order by id; raise NotFoundError if it does not exist."""
    ref = get_db().collection(ORDERS_COLLECTION).document(order_id)
    if not ref.get().exists:
        raise NotFoundError("Order")
    ref.delete()
# ── Comms Log (SQLite, async) ─────────────────────────────────────────────────
def _row_to_comm(row: dict) -> CommInDB:
    """Convert a crm_comms_log SQLite row into a CommInDB model.

    Decodes JSON columns, normalises attachment dicts, and coerces SQLite
    integer booleans back to bool.
    """
    row = dict(row)
    raw_attachments = json.loads(row.get("attachments") or "[]")
    # Normalise attachment dicts — tolerate both synced (content_type/size) and
    # sent (nextcloud_path) shapes so Pydantic never sees missing required fields.
    row["attachments"] = [
        {k: v for k, v in a.items() if k in ("filename", "nextcloud_path", "content_type", "size")}
        for a in raw_attachments if isinstance(a, dict) and a.get("filename")
    ]
    if row.get("to_addrs") and isinstance(row["to_addrs"], str):
        try:
            row["to_addrs"] = json.loads(row["to_addrs"])
        except Exception:
            # Malformed JSON in the column degrades to "no recipients".
            row["to_addrs"] = []
    # SQLite stores booleans as integers
    row["is_important"] = bool(row.get("is_important", 0))
    row["is_read"] = bool(row.get("is_read", 0))
    return CommInDB(**{k: v for k, v in row.items() if k in CommInDB.model_fields})
async def list_comms(
    customer_id: str,
    type: str | None = None,
    direction: str | None = None,
    limit: int = 100,
) -> list[CommInDB]:
    """Return a customer's comm-log entries, newest first.

    Combines two sources: rows explicitly linked to the customer, plus
    unlinked email rows whose from/to addresses match one of the customer's
    email contacts (legacy rows created before automatic linking).
    """
    db = await mqtt_db.get_db()
    where = ["customer_id = ?"]
    params: list = [customer_id]
    if type:
        where.append("type = ?")
        params.append(type)
    if direction:
        where.append("direction = ?")
        params.append(direction)
    clause = " AND ".join(where)
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log WHERE {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    entries = [_row_to_comm(dict(r)) for r in rows]
    # Fallback: include unlinked email rows (customer_id NULL) if addresses match this customer.
    # This covers historical rows created before automatic outbound customer linking.
    fs = get_db()
    doc = fs.collection("crm_customers").document(customer_id).get()
    if doc.exists:
        data = doc.to_dict() or {}
        customer_emails = {
            (c.get("value") or "").strip().lower()
            for c in (data.get("contacts") or [])
            if c.get("type") == "email" and c.get("value")
        }
    else:
        customer_emails = set()
    if customer_emails:
        extra_where = [
            "type = 'email'",
            "(customer_id IS NULL OR customer_id = '')",
        ]
        extra_params: list = []
        if direction:
            extra_where.append("direction = ?")
            extra_params.append(direction)
        extra_clause = " AND ".join(extra_where)
        # Over-fetch (>= 300) so address matching has enough candidates.
        extra_rows = await db.execute_fetchall(
            f"SELECT * FROM crm_comms_log WHERE {extra_clause} "
            "ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
            extra_params + [max(limit, 300)],
        )
        for r in extra_rows:
            e = _row_to_comm(dict(r))
            from_addr = (e.from_addr or "").strip().lower()
            to_addrs = [(a or "").strip().lower() for a in (e.to_addrs or [])]
            matched = (from_addr in customer_emails) or any(a in customer_emails for a in to_addrs)
            if matched:
                entries.append(e)
    # De-duplicate and sort consistently
    uniq = {e.id: e for e in entries}
    sorted_entries = sorted(
        uniq.values(),
        key=lambda e: ((e.occurred_at or e.created_at or ""), (e.created_at or ""), (e.id or "")),
        reverse=True,
    )
    return sorted_entries[:limit]
async def list_all_emails(
    direction: str | None = None,
    customers_only: bool = False,
    mail_accounts: list[str] | None = None,
    limit: int = 500,
) -> list[CommInDB]:
    """Return recent email comm-log rows across all customers, newest first.

    Args:
        direction: optional inbound/outbound filter.
        customers_only: restrict to rows linked to a customer.
        mail_accounts: restrict to the given mail accounts.
        limit: maximum number of rows returned.
    """
    db = await mqtt_db.get_db()
    where = ["type = 'email'"]
    params: list = []
    if direction:
        where.append("direction = ?")
        params.append(direction)
    if customers_only:
        where.append("customer_id IS NOT NULL")
    if mail_accounts:
        # Placeholders only — account names are never interpolated into SQL.
        placeholders = ",".join("?" for _ in mail_accounts)
        where.append(f"mail_account IN ({placeholders})")
        params.extend(mail_accounts)
    clause = f"WHERE {' AND '.join(where)}"
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    return [_row_to_comm(dict(r)) for r in rows]
async def list_all_comms(
    type: str | None = None,
    direction: str | None = None,
    limit: int = 200,
) -> list[CommInDB]:
    """Return recent comm-log entries across all customers, newest first."""
    db = await mqtt_db.get_db()
    conditions: list[str] = []
    params: list = []
    for column, value in (("type", type), ("direction", direction)):
        if value:
            conditions.append(f"{column} = ?")
            params.append(value)
    clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    return [_row_to_comm(dict(r)) for r in rows]
async def get_comm(comm_id: str) -> CommInDB:
    """Fetch a single comm-log entry by id; 404 if it does not exist."""
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if rows:
        return _row_to_comm(dict(rows[0]))
    raise HTTPException(status_code=404, detail="Comm entry not found")
async def create_comm(data: CommCreate) -> CommInDB:
    """Insert a comm-log entry and return it re-read from the database."""
    db = await mqtt_db.get_db()
    now = datetime.utcnow().isoformat()
    comm_id = str(uuid.uuid4())
    # occurred_at defaults to insert time when the caller does not supply it.
    occurred_at = data.occurred_at or now
    attachments_json = json.dumps([a.model_dump() for a in data.attachments])
    await db.execute(
        """INSERT INTO crm_comms_log
           (id, customer_id, type, mail_account, direction, subject, body, attachments,
            ext_message_id, logged_by, occurred_at, created_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (comm_id, data.customer_id, data.type.value, data.mail_account, data.direction.value,
         data.subject, data.body, attachments_json,
         data.ext_message_id, data.logged_by, occurred_at, now),
    )
    await db.commit()
    return await get_comm(comm_id)
async def update_comm(comm_id: str, data: CommUpdate) -> CommInDB:
    """Apply a partial update to a comm-log entry; 404 if it does not exist.

    Column names in the SET clause come from CommUpdate's model fields, so no
    caller-controlled text is interpolated into the SQL.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT id FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Comm entry not found")
    updates = data.model_dump(exclude_none=True)
    if not updates:
        # Nothing to change — return the current record unchanged.
        return await get_comm(comm_id)
    set_clause = ", ".join(f"{k} = ?" for k in updates)
    await db.execute(
        f"UPDATE crm_comms_log SET {set_clause} WHERE id = ?",
        list(updates.values()) + [comm_id],
    )
    await db.commit()
    return await get_comm(comm_id)
async def delete_comm(comm_id: str) -> None:
    """Delete a single comm-log entry; 404 if it does not exist."""
    db = await mqtt_db.get_db()
    found = await db.execute_fetchall(
        "SELECT id FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if not found:
        raise HTTPException(status_code=404, detail="Comm entry not found")
    await db.execute("DELETE FROM crm_comms_log WHERE id = ?", (comm_id,))
    await db.commit()
async def delete_comms_bulk(ids: list[str]) -> int:
    """Delete multiple comm entries. Returns count deleted."""
    if not ids:
        return 0
    db = await mqtt_db.get_db()
    marks = ",".join("?" * len(ids))
    cursor = await db.execute(
        f"DELETE FROM crm_comms_log WHERE id IN ({marks})", ids
    )
    await db.commit()
    return cursor.rowcount
async def set_comm_important(comm_id: str, important: bool) -> CommInDB:
    """Set the is_important flag on a comm entry and return it (404 if missing)."""
    db = await mqtt_db.get_db()
    flag = 1 if important else 0
    await db.execute(
        "UPDATE crm_comms_log SET is_important = ? WHERE id = ?",
        (flag, comm_id),
    )
    await db.commit()
    return await get_comm(comm_id)
async def set_comm_read(comm_id: str, read: bool) -> CommInDB:
    """Set the is_read flag on a comm entry and return it (404 if missing)."""
    db = await mqtt_db.get_db()
    flag = 1 if read else 0
    await db.execute(
        "UPDATE crm_comms_log SET is_read = ? WHERE id = ?",
        (flag, comm_id),
    )
    await db.commit()
    return await get_comm(comm_id)
# ── Media (SQLite, async) ─────────────────────────────────────────────────────
def _row_to_media(row: dict) -> MediaInDB:
    """Convert a crm_media SQLite row into a MediaInDB model.

    Decodes the JSON-encoded tags column and — consistently with the other
    row mappers in this module (_row_to_comm, _row_to_quotation) — drops any
    columns the model does not declare, so schema drift cannot break
    deserialization.
    """
    row = dict(row)
    row["tags"] = json.loads(row.get("tags") or "[]")
    return MediaInDB(**{k: v for k, v in row.items() if k in MediaInDB.model_fields})
async def list_media(
    customer_id: str | None = None,
    order_id: str | None = None,
) -> list[MediaInDB]:
    """List media records, optionally filtered by customer and/or order, newest first."""
    db = await mqtt_db.get_db()
    conditions: list[str] = []
    params: list = []
    for column, value in (("customer_id", customer_id), ("order_id", order_id)):
        if value:
            conditions.append(f"{column} = ?")
            params.append(value)
    clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_media {clause} ORDER BY created_at DESC",
        params,
    )
    return [_row_to_media(dict(r)) for r in rows]
async def create_media(data: MediaCreate) -> MediaInDB:
    """Insert a media record and return it re-read from the database."""
    db = await mqtt_db.get_db()
    now = datetime.utcnow().isoformat()
    media_id = str(uuid.uuid4())
    tags_json = json.dumps(data.tags)
    # direction is optional; store the enum's string value or NULL.
    direction = data.direction.value if data.direction else None
    await db.execute(
        """INSERT INTO crm_media
           (id, customer_id, order_id, filename, nextcloud_path, mime_type,
            direction, tags, uploaded_by, created_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (media_id, data.customer_id, data.order_id, data.filename,
         data.nextcloud_path, data.mime_type, direction,
         tags_json, data.uploaded_by, now),
    )
    await db.commit()
    rows = await db.execute_fetchall(
        "SELECT * FROM crm_media WHERE id = ?", (media_id,)
    )
    return _row_to_media(dict(rows[0]))
async def delete_media(media_id: str) -> None:
    """Delete a media record; 404 if it does not exist."""
    db = await mqtt_db.get_db()
    found = await db.execute_fetchall(
        "SELECT id FROM crm_media WHERE id = ?", (media_id,)
    )
    if not found:
        raise HTTPException(status_code=404, detail="Media entry not found")
    await db.execute("DELETE FROM crm_media WHERE id = ?", (media_id,))
    await db.commit()

View File

@@ -7,6 +7,8 @@ from devices.models import (
DeviceUsersResponse, DeviceUserInfo,
)
from devices import service
from mqtt import database as mqtt_db
from mqtt.models import DeviceAlertEntry, DeviceAlertsResponse
router = APIRouter(prefix="/api/devices", tags=["devices"])
@@ -67,3 +69,13 @@ async def delete_device(
_user: TokenPayload = Depends(require_permission("devices", "delete")),
):
service.delete_device(device_id)
@router.get("/{device_id}/alerts", response_model=DeviceAlertsResponse)
async def get_device_alerts(
    device_id: str,
    _user: TokenPayload = Depends(require_permission("devices", "view")),
):
    """Return the current active alert set for a device. Empty list means fully healthy."""
    # Alerts live in the MQTT-side SQLite store, keyed by device id.
    rows = await mqtt_db.get_alerts(device_id)
    return DeviceAlertsResponse(alerts=[DeviceAlertEntry(**r) for r in rows])

View File

@@ -1,15 +1,24 @@
from pydantic import BaseModel
from typing import Optional, List
from enum import Enum
class UpdateType(str, Enum):
    """Rollout policy controlling how a firmware version reaches devices."""
    optional = "optional" # user-initiated only
    mandatory = "mandatory" # auto-installs on next reboot
    emergency = "emergency" # auto-installs on reboot + daily check + MQTT push
class FirmwareVersion(BaseModel):
id: str
hw_type: str # "vs", "vp", "vx"
channel: str # "stable", "beta", "alpha", "testing"
version: str # semver e.g. "1.4.2"
hw_type: str # e.g. "vesper", "vesper_plus", "vesper_pro"
channel: str # "stable", "beta", "alpha", "testing"
version: str # semver e.g. "1.5"
filename: str
size_bytes: int
sha256: str
update_type: UpdateType = UpdateType.mandatory
min_fw_version: Optional[str] = None # minimum fw version required to install this
uploaded_at: str
notes: Optional[str] = None
is_latest: bool = False
@@ -20,12 +29,19 @@ class FirmwareListResponse(BaseModel):
total: int
class FirmwareLatestResponse(BaseModel):
class FirmwareMetadataResponse(BaseModel):
    """Firmware metadata payload.

    Returned by both /latest and /{version}/info endpoints; devices read it
    to decide whether (and how) to install an update.
    """
    hw_type: str  # hardware family identifier, e.g. "vesper_pro" (validated against VALID_HW_TYPES in the service)
    channel: str  # release channel: "stable", "beta", "alpha" or "testing"
    version: str  # semver-style version string
    size_bytes: int  # size of the firmware binary in bytes
    sha256: str  # SHA-256 digest of the binary, for integrity verification
    update_type: UpdateType  # rollout aggressiveness: optional / mandatory / emergency
    min_fw_version: Optional[str] = None  # minimum installed version required to apply this update
    download_url: str  # relative URL of the firmware.bin download endpoint
    uploaded_at: str  # upload timestamp string
    notes: Optional[str] = None  # free-form release notes


# Keep backwards-compatible alias
FirmwareLatestResponse = FirmwareMetadataResponse

View File

@@ -4,7 +4,7 @@ from typing import Optional
from auth.models import TokenPayload
from auth.dependencies import require_permission
from firmware.models import FirmwareVersion, FirmwareListResponse, FirmwareLatestResponse
from firmware.models import FirmwareVersion, FirmwareListResponse, FirmwareMetadataResponse, UpdateType
from firmware import service
router = APIRouter(prefix="/api/firmware", tags=["firmware"])
@@ -15,6 +15,8 @@ async def upload_firmware(
hw_type: str = Form(...),
channel: str = Form(...),
version: str = Form(...),
update_type: UpdateType = Form(UpdateType.mandatory),
min_fw_version: Optional[str] = Form(None),
notes: Optional[str] = Form(None),
file: UploadFile = File(...),
_user: TokenPayload = Depends(require_permission("manufacturing", "add")),
@@ -25,6 +27,8 @@ async def upload_firmware(
channel=channel,
version=version,
file_bytes=file_bytes,
update_type=update_type,
min_fw_version=min_fw_version,
notes=notes,
)
@@ -39,7 +43,7 @@ def list_firmware(
return FirmwareListResponse(firmware=items, total=len(items))
@router.get("/{hw_type}/{channel}/latest", response_model=FirmwareLatestResponse)
@router.get("/{hw_type}/{channel}/latest", response_model=FirmwareMetadataResponse)
def get_latest_firmware(hw_type: str, channel: str):
"""Returns metadata for the latest firmware for a given hw_type + channel.
No auth required — devices call this endpoint to check for updates.
@@ -47,6 +51,14 @@ def get_latest_firmware(hw_type: str, channel: str):
return service.get_latest(hw_type, channel)
@router.get("/{hw_type}/{channel}/{version}/info", response_model=FirmwareMetadataResponse)
def get_firmware_info(hw_type: str, channel: str, version: str):
    """Returns metadata for a specific firmware version.
    No auth required — devices call this to resolve upgrade chains.
    """
    # Validation (hw_type/channel) and the Firestore lookup both live in the service layer.
    info = service.get_version_info(hw_type, channel, version)
    return info
@router.get("/{hw_type}/{channel}/{version}/firmware.bin")
def download_firmware(hw_type: str, channel: str, version: str):
"""Download the firmware binary. No auth required — devices call this directly."""

View File

@@ -8,11 +8,11 @@ from fastapi import HTTPException
from config import settings
from shared.firebase import get_db
from shared.exceptions import NotFoundError
from firmware.models import FirmwareVersion, FirmwareLatestResponse
from firmware.models import FirmwareVersion, FirmwareMetadataResponse, UpdateType
COLLECTION = "firmware_versions"
VALID_HW_TYPES = {"vs", "vp", "vx"}
VALID_HW_TYPES = {"vesper", "vesper_plus", "vesper_pro", "chronos", "chronos_pro", "agnus", "agnus_mini"}
VALID_CHANNELS = {"stable", "beta", "alpha", "testing"}
@@ -36,23 +36,43 @@ def _doc_to_firmware_version(doc) -> FirmwareVersion:
filename=data.get("filename", "firmware.bin"),
size_bytes=data.get("size_bytes", 0),
sha256=data.get("sha256", ""),
update_type=data.get("update_type", UpdateType.mandatory),
min_fw_version=data.get("min_fw_version"),
uploaded_at=uploaded_str,
notes=data.get("notes"),
is_latest=data.get("is_latest", False),
)
def _fw_to_metadata_response(fw: FirmwareVersion) -> FirmwareMetadataResponse:
    """Map a stored FirmwareVersion onto the public metadata response shape."""
    # The URL is relative; clients resolve it against the API host they queried.
    url = f"/api/firmware/{fw.hw_type}/{fw.channel}/{fw.version}/firmware.bin"
    return FirmwareMetadataResponse(
        hw_type=fw.hw_type,
        channel=fw.channel,
        version=fw.version,
        size_bytes=fw.size_bytes,
        sha256=fw.sha256,
        update_type=fw.update_type,
        min_fw_version=fw.min_fw_version,
        download_url=url,
        uploaded_at=fw.uploaded_at,
        notes=fw.notes,
    )
def upload_firmware(
hw_type: str,
channel: str,
version: str,
file_bytes: bytes,
update_type: UpdateType = UpdateType.mandatory,
min_fw_version: str | None = None,
notes: str | None = None,
) -> FirmwareVersion:
if hw_type not in VALID_HW_TYPES:
raise HTTPException(status_code=400, detail=f"Invalid hw_type. Must be one of: {', '.join(VALID_HW_TYPES)}")
raise HTTPException(status_code=400, detail=f"Invalid hw_type. Must be one of: {', '.join(sorted(VALID_HW_TYPES))}")
if channel not in VALID_CHANNELS:
raise HTTPException(status_code=400, detail=f"Invalid channel. Must be one of: {', '.join(VALID_CHANNELS)}")
raise HTTPException(status_code=400, detail=f"Invalid channel. Must be one of: {', '.join(sorted(VALID_CHANNELS))}")
dest = _storage_path(hw_type, channel, version)
dest.parent.mkdir(parents=True, exist_ok=True)
@@ -83,6 +103,8 @@ def upload_firmware(
"filename": "firmware.bin",
"size_bytes": len(file_bytes),
"sha256": sha256,
"update_type": update_type.value,
"min_fw_version": min_fw_version,
"uploaded_at": now,
"notes": notes,
"is_latest": True,
@@ -108,7 +130,7 @@ def list_firmware(
return items
def get_latest(hw_type: str, channel: str) -> FirmwareLatestResponse:
def get_latest(hw_type: str, channel: str) -> FirmwareMetadataResponse:
if hw_type not in VALID_HW_TYPES:
raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
if channel not in VALID_CHANNELS:
@@ -126,18 +148,29 @@ def get_latest(hw_type: str, channel: str) -> FirmwareLatestResponse:
if not docs:
raise NotFoundError("Firmware")
fw = _doc_to_firmware_version(docs[0])
download_url = f"/api/firmware/{hw_type}/{channel}/{fw.version}/firmware.bin"
return FirmwareLatestResponse(
hw_type=fw.hw_type,
channel=fw.channel,
version=fw.version,
size_bytes=fw.size_bytes,
sha256=fw.sha256,
download_url=download_url,
uploaded_at=fw.uploaded_at,
notes=fw.notes,
return _fw_to_metadata_response(_doc_to_firmware_version(docs[0]))
def get_version_info(hw_type: str, channel: str, version: str) -> FirmwareMetadataResponse:
    """Fetch metadata for a specific version. Used by devices resolving upgrade chains."""
    # Reject unknown hw_type/channel before touching Firestore.
    if hw_type not in VALID_HW_TYPES:
        raise HTTPException(status_code=400, detail=f"Invalid hw_type '{hw_type}'")
    if channel not in VALID_CHANNELS:
        raise HTTPException(status_code=400, detail=f"Invalid channel '{channel}'")
    query = (
        get_db()
        .collection(COLLECTION)
        .where("hw_type", "==", hw_type)
        .where("channel", "==", channel)
        .where("version", "==", version)
        .limit(1)
    )
    matches = list(query.stream())
    if not matches:
        raise NotFoundError("Firmware version")
    return _fw_to_metadata_response(_doc_to_firmware_version(matches[0]))
def get_firmware_path(hw_type: str, channel: str, version: str) -> Path:

View File

@@ -17,6 +17,15 @@ from builder.router import router as builder_router
from manufacturing.router import router as manufacturing_router
from firmware.router import router as firmware_router
from admin.router import router as admin_router
from crm.router import router as crm_products_router
from crm.customers_router import router as crm_customers_router
from crm.orders_router import router as crm_orders_router
from crm.comms_router import router as crm_comms_router
from crm.media_router import router as crm_media_router
from crm.nextcloud_router import router as crm_nextcloud_router
from crm.quotations_router import router as crm_quotations_router
from crm.nextcloud import close_client as close_nextcloud_client, keepalive_ping as nextcloud_keepalive
from crm.mail_accounts import get_mail_accounts
from mqtt.client import mqtt_manager
from mqtt import database as mqtt_db
from melodies import service as melody_service
@@ -50,6 +59,30 @@ app.include_router(builder_router)
app.include_router(manufacturing_router)
app.include_router(firmware_router)
app.include_router(admin_router)
app.include_router(crm_products_router)
app.include_router(crm_customers_router)
app.include_router(crm_orders_router)
app.include_router(crm_comms_router)
app.include_router(crm_media_router)
app.include_router(crm_nextcloud_router)
app.include_router(crm_quotations_router)
async def nextcloud_keepalive_loop(interval_seconds: float = 45.0) -> None:
    """Keep the Nextcloud client connection warm for the app's lifetime.

    Pings once immediately (eager warmup on startup), then again every
    ``interval_seconds``. Runs forever; intended to be scheduled with
    ``asyncio.create_task`` at startup.

    Args:
        interval_seconds: Delay between keepalive pings. Defaults to 45,
            matching the previous hard-coded behaviour.
    """
    await nextcloud_keepalive()  # eager warmup on startup
    while True:
        await asyncio.sleep(interval_seconds)
        await nextcloud_keepalive()
async def email_sync_loop():
    """Background loop that periodically pulls inbound email into the CRM.

    Sleeps first, so the initial sync happens one full interval after
    startup rather than immediately. Runs forever; scheduled via
    asyncio.create_task at startup when IMAP-enabled accounts exist.
    """
    while True:
        await asyncio.sleep(settings.email_sync_interval_minutes * 60)
        try:
            # Imported lazily inside the loop — presumably so app startup
            # doesn't depend on this module importing cleanly; TODO confirm.
            from crm.email_sync import sync_emails
            await sync_emails()
        except Exception as e:
            # Best-effort: a failed sync is reported and retried next cycle.
            print(f"[EMAIL SYNC] Error: {e}")
@app.on_event("startup")
@@ -59,12 +92,20 @@ async def startup():
await melody_service.migrate_from_firestore()
mqtt_manager.start(asyncio.get_event_loop())
asyncio.create_task(mqtt_db.purge_loop())
asyncio.create_task(nextcloud_keepalive_loop())
sync_accounts = [a for a in get_mail_accounts() if a.get("sync_inbound") and a.get("imap_host")]
if sync_accounts:
print(f"[EMAIL SYNC] IMAP configured for {len(sync_accounts)} account(s) - starting sync loop")
asyncio.create_task(email_sync_loop())
else:
print("[EMAIL SYNC] IMAP not configured - sync loop disabled")
@app.on_event("shutdown")
async def shutdown():
    """Graceful teardown: stop the MQTT client, then close the SQLite and Nextcloud clients."""
    mqtt_manager.stop()
    await mqtt_db.close_db()
    await close_nextcloud_client()
@app.get("/api/health")
@@ -74,3 +115,4 @@ async def health_check():
"firebase": firebase_initialized,
"mqtt": mqtt_manager.connected,
}

View File

@@ -4,23 +4,23 @@ from enum import Enum
class BoardType(str, Enum):
vs = "vs" # Vesper
vp = "vp" # Vesper Plus
vx = "vx" # Vesper Pro
cb = "cb" # Chronos
cp = "cp" # Chronos Pro
am = "am" # Agnus Mini
ab = "ab" # Agnus
vesper = "vesper"
vesper_plus = "vesper_plus"
vesper_pro = "vesper_pro"
chronos = "chronos"
chronos_pro = "chronos_pro"
agnus_mini = "agnus_mini"
agnus = "agnus"
BOARD_TYPE_LABELS = {
"vs": "Vesper",
"vp": "Vesper Plus",
"vx": "Vesper Pro",
"cb": "Chronos",
"cp": "Chronos Pro",
"am": "Agnus Mini",
"ab": "Agnus",
"vesper": "Vesper",
"vesper_plus": "Vesper+",
"vesper_pro": "Vesper Pro",
"chronos": "Chronos",
"chronos_pro": "Chronos Pro",
"agnus_mini": "Agnus Mini",
"agnus": "Agnus",
}

View File

@@ -26,7 +26,7 @@ class MqttManager:
self._client = paho_mqtt.Client(
callback_api_version=paho_mqtt.CallbackAPIVersion.VERSION2,
client_id="bellsystems-admin-panel",
client_id=settings.mqtt_client_id,
clean_session=True,
)
@@ -64,6 +64,8 @@ class MqttManager:
client.subscribe([
("vesper/+/data", 1),
("vesper/+/status/heartbeat", 1),
("vesper/+/status/alerts", 1),
("vesper/+/status/info", 0),
("vesper/+/logs", 1),
])
else:

View File

@@ -76,6 +76,102 @@ SCHEMA_STATEMENTS = [
)""",
"CREATE INDEX IF NOT EXISTS idx_mfg_audit_time ON mfg_audit_log(timestamp)",
"CREATE INDEX IF NOT EXISTS idx_mfg_audit_action ON mfg_audit_log(action)",
# Active device alerts (current state, not history)
"""CREATE TABLE IF NOT EXISTS device_alerts (
device_serial TEXT NOT NULL,
subsystem TEXT NOT NULL,
state TEXT NOT NULL,
message TEXT,
updated_at TEXT NOT NULL DEFAULT (datetime('now')),
PRIMARY KEY (device_serial, subsystem)
)""",
"CREATE INDEX IF NOT EXISTS idx_device_alerts_serial ON device_alerts(device_serial)",
# CRM communications log
"""CREATE TABLE IF NOT EXISTS crm_comms_log (
id TEXT PRIMARY KEY,
customer_id TEXT,
type TEXT NOT NULL,
mail_account TEXT,
direction TEXT NOT NULL,
subject TEXT,
body TEXT,
body_html TEXT,
attachments TEXT NOT NULL DEFAULT '[]',
ext_message_id TEXT,
from_addr TEXT,
to_addrs TEXT,
logged_by TEXT,
occurred_at TEXT NOT NULL,
created_at TEXT NOT NULL
)""",
"CREATE INDEX IF NOT EXISTS idx_crm_comms_customer ON crm_comms_log(customer_id, occurred_at)",
# CRM media references
"""CREATE TABLE IF NOT EXISTS crm_media (
id TEXT PRIMARY KEY,
customer_id TEXT,
order_id TEXT,
filename TEXT NOT NULL,
nextcloud_path TEXT NOT NULL,
mime_type TEXT,
direction TEXT,
tags TEXT NOT NULL DEFAULT '[]',
uploaded_by TEXT,
created_at TEXT NOT NULL
)""",
"CREATE INDEX IF NOT EXISTS idx_crm_media_customer ON crm_media(customer_id)",
"CREATE INDEX IF NOT EXISTS idx_crm_media_order ON crm_media(order_id)",
# CRM sync state (last email sync timestamp, etc.)
"""CREATE TABLE IF NOT EXISTS crm_sync_state (
key TEXT PRIMARY KEY,
value TEXT
)""",
# CRM Quotations
"""CREATE TABLE IF NOT EXISTS crm_quotations (
id TEXT PRIMARY KEY,
quotation_number TEXT UNIQUE NOT NULL,
title TEXT,
subtitle TEXT,
customer_id TEXT NOT NULL,
language TEXT NOT NULL DEFAULT 'en',
status TEXT NOT NULL DEFAULT 'draft',
order_type TEXT,
shipping_method TEXT,
estimated_shipping_date TEXT,
global_discount_label TEXT,
global_discount_percent REAL NOT NULL DEFAULT 0,
vat_percent REAL NOT NULL DEFAULT 24,
shipping_cost REAL NOT NULL DEFAULT 0,
shipping_cost_discount REAL NOT NULL DEFAULT 0,
install_cost REAL NOT NULL DEFAULT 0,
install_cost_discount REAL NOT NULL DEFAULT 0,
extras_label TEXT,
extras_cost REAL NOT NULL DEFAULT 0,
comments TEXT NOT NULL DEFAULT '[]',
subtotal_before_discount REAL NOT NULL DEFAULT 0,
global_discount_amount REAL NOT NULL DEFAULT 0,
new_subtotal REAL NOT NULL DEFAULT 0,
vat_amount REAL NOT NULL DEFAULT 0,
final_total REAL NOT NULL DEFAULT 0,
nextcloud_pdf_path TEXT,
nextcloud_pdf_url TEXT,
created_at TEXT NOT NULL,
updated_at TEXT NOT NULL
)""",
"""CREATE TABLE IF NOT EXISTS crm_quotation_items (
id TEXT PRIMARY KEY,
quotation_id TEXT NOT NULL,
product_id TEXT,
description TEXT,
unit_type TEXT NOT NULL DEFAULT 'pcs',
unit_cost REAL NOT NULL DEFAULT 0,
discount_percent REAL NOT NULL DEFAULT 0,
quantity REAL NOT NULL DEFAULT 1,
line_total REAL NOT NULL DEFAULT 0,
sort_order INTEGER NOT NULL DEFAULT 0,
FOREIGN KEY (quotation_id) REFERENCES crm_quotations(id)
)""",
"CREATE INDEX IF NOT EXISTS idx_crm_quotations_customer ON crm_quotations(customer_id)",
"CREATE INDEX IF NOT EXISTS idx_crm_quotation_items_quotation ON crm_quotation_items(quotation_id, sort_order)",
]
@@ -86,6 +182,65 @@ async def init_db():
for stmt in SCHEMA_STATEMENTS:
await _db.execute(stmt)
await _db.commit()
# Migrations: add columns that may not exist in older DBs
_migrations = [
"ALTER TABLE crm_comms_log ADD COLUMN body_html TEXT",
"ALTER TABLE crm_comms_log ADD COLUMN mail_account TEXT",
"ALTER TABLE crm_comms_log ADD COLUMN from_addr TEXT",
"ALTER TABLE crm_comms_log ADD COLUMN to_addrs TEXT",
"ALTER TABLE crm_comms_log ADD COLUMN is_important INTEGER NOT NULL DEFAULT 0",
"ALTER TABLE crm_comms_log ADD COLUMN is_read INTEGER NOT NULL DEFAULT 0",
"ALTER TABLE crm_quotation_items ADD COLUMN vat_percent REAL NOT NULL DEFAULT 24",
"ALTER TABLE crm_quotations ADD COLUMN quick_notes TEXT NOT NULL DEFAULT '{}'",
"ALTER TABLE crm_quotations ADD COLUMN client_org TEXT",
"ALTER TABLE crm_quotations ADD COLUMN client_name TEXT",
"ALTER TABLE crm_quotations ADD COLUMN client_location TEXT",
"ALTER TABLE crm_quotations ADD COLUMN client_phone TEXT",
"ALTER TABLE crm_quotations ADD COLUMN client_email TEXT",
]
for m in _migrations:
try:
await _db.execute(m)
await _db.commit()
except Exception:
pass # column already exists
# Migration: drop NOT NULL on crm_comms_log.customer_id if it exists.
# SQLite doesn't support ALTER COLUMN, so we check via table_info and
# rebuild the table if needed.
rows = await _db.execute_fetchall("PRAGMA table_info(crm_comms_log)")
for row in rows:
# row: (cid, name, type, notnull, dflt_value, pk)
if row[1] == "customer_id" and row[3] == 1: # notnull=1
logger.info("Migrating crm_comms_log: removing NOT NULL from customer_id")
await _db.execute("ALTER TABLE crm_comms_log RENAME TO crm_comms_log_old")
await _db.execute("""CREATE TABLE crm_comms_log (
id TEXT PRIMARY KEY,
customer_id TEXT,
type TEXT NOT NULL,
mail_account TEXT,
direction TEXT NOT NULL,
subject TEXT,
body TEXT,
body_html TEXT,
attachments TEXT NOT NULL DEFAULT '[]',
ext_message_id TEXT,
from_addr TEXT,
to_addrs TEXT,
logged_by TEXT,
occurred_at TEXT NOT NULL,
created_at TEXT NOT NULL
)""")
await _db.execute("""INSERT INTO crm_comms_log
SELECT id, customer_id, type, NULL, direction, subject, body, body_html,
attachments, ext_message_id, from_addr, to_addrs, logged_by,
occurred_at, created_at
FROM crm_comms_log_old""")
await _db.execute("DROP TABLE crm_comms_log_old")
await _db.execute("CREATE INDEX IF NOT EXISTS idx_crm_comms_customer ON crm_comms_log(customer_id, occurred_at)")
await _db.commit()
logger.info("Migration complete: crm_comms_log.customer_id is now nullable")
break
logger.info(f"SQLite database initialized at {settings.sqlite_db_path}")
@@ -252,3 +407,37 @@ async def purge_loop():
await purge_old_data()
except Exception as e:
logger.error(f"Purge failed: {e}")
# --- Device Alerts ---
async def upsert_alert(device_serial: str, subsystem: str, state: str,
                       message: str | None = None):
    """Insert or refresh the active alert row for (device_serial, subsystem).

    The device_alerts table keeps current state, not history: the upsert
    guarantees at most one row per device/subsystem pair, overwriting
    state and message and bumping updated_at on conflict.
    """
    db = await get_db()
    await db.execute(
        """INSERT INTO device_alerts (device_serial, subsystem, state, message, updated_at)
        VALUES (?, ?, ?, ?, datetime('now'))
        ON CONFLICT(device_serial, subsystem)
        DO UPDATE SET state=excluded.state, message=excluded.message,
        updated_at=excluded.updated_at""",
        (device_serial, subsystem, state, message),
    )
    await db.commit()
async def delete_alert(device_serial: str, subsystem: str):
    """Remove the active alert row for one device subsystem (no-op if absent)."""
    params = (device_serial, subsystem)
    db = await get_db()
    await db.execute(
        "DELETE FROM device_alerts WHERE device_serial = ? AND subsystem = ?",
        params,
    )
    await db.commit()
async def get_alerts(device_serial: str) -> list[dict]:
    """Return all active alert rows for a device, most recently updated first.

    Each element is a plain dict of the device_alerts columns
    (device_serial, subsystem, state, message, updated_at).
    """
    db = await get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM device_alerts WHERE device_serial = ? ORDER BY updated_at DESC",
        (device_serial,),
    )
    return [dict(r) for r in rows]

View File

@@ -18,6 +18,10 @@ async def handle_message(serial: str, topic_type: str, payload: dict):
try:
if topic_type == "status/heartbeat":
await _handle_heartbeat(serial, payload)
elif topic_type == "status/alerts":
await _handle_alerts(serial, payload)
elif topic_type == "status/info":
await _handle_info(serial, payload)
elif topic_type == "logs":
await _handle_log(serial, payload)
elif topic_type == "data":
@@ -29,6 +33,8 @@ async def handle_message(serial: str, topic_type: str, payload: dict):
async def _handle_heartbeat(serial: str, payload: dict):
# Store silently — do not log as a visible event.
# The console surfaces an alert only when the device goes silent (no heartbeat for 90s).
inner = payload.get("payload", {})
await db.insert_heartbeat(
device_serial=serial,
@@ -55,6 +61,31 @@ async def _handle_log(serial: str, payload: dict):
)
async def _handle_alerts(serial: str, payload: dict):
    """Apply an alert message: CLEARED removes the stored row, anything else upserts it."""
    alert_subsystem = payload.get("subsystem", "")
    alert_state = payload.get("state", "")
    # Both fields are mandatory — drop (but log) anything malformed.
    if not alert_subsystem or not alert_state:
        logger.warning(f"Malformed alert payload from {serial}: {payload}")
        return
    if alert_state == "CLEARED":
        await db.delete_alert(serial, alert_subsystem)
        return
    await db.upsert_alert(serial, alert_subsystem, alert_state, payload.get("msg"))
async def _handle_info(serial: str, payload: dict):
    """Log informational device events at debug level; nothing is persisted."""
    evt = payload.get("type", "")
    body = payload.get("payload", {})
    if evt == "playback_started":
        logger.debug(f"{serial}: playback started — melody_uid={body.get('melody_uid')}")
    elif evt == "playback_stopped":
        logger.debug(f"{serial}: playback stopped")
    else:
        # Unknown event types are tolerated — just traced for debugging.
        logger.debug(f"{serial}: info event '{evt}'")
async def _handle_data_response(serial: str, payload: dict):
status = payload.get("status", "")

View File

@@ -84,3 +84,15 @@ class CommandSendResponse(BaseModel):
success: bool
command_id: int
message: str
class DeviceAlertEntry(BaseModel):
    """One active alert for a device subsystem (mirrors a device_alerts table row)."""
    device_serial: str
    subsystem: str  # subsystem that raised the alert
    state: str  # alert state reported by the device ("CLEARED" rows are deleted, never stored)
    message: Optional[str] = None  # optional human-readable detail from the device
    updated_at: str  # when this alert row was last inserted or refreshed
class DeviceAlertsResponse(BaseModel):
    """Response body for the device alerts endpoint; an empty list means fully healthy."""
    alerts: List[DeviceAlertEntry]

View File

@@ -9,4 +9,7 @@ passlib[bcrypt]==1.7.4
python-multipart==0.0.20
bcrypt==4.0.1
aiosqlite==0.20.0
resend==2.10.0
resend==2.10.0
httpx>=0.27.0
weasyprint>=62.0
jinja2>=3.1.0

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

BIN
backend/templates/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

View File

@@ -0,0 +1,708 @@
<!DOCTYPE html>
<html lang="{{ lang }}">
<head>
<meta charset="UTF-8"/>
<title>{% if lang == 'gr' %}Προσφορά{% else %}Quotation{% endif %} {{ quotation.quotation_number }}</title>
<style>
@import url('https://fonts.googleapis.com/css2?family=Noto+Sans:ital,wght@0,400;0,600;0,700;1,400&display=swap');
* { box-sizing: border-box; margin: 0; padding: 0; }
body {
font-family: 'Noto Sans', DejaVu Sans, Arial, sans-serif;
font-size: 9.5pt;
color: #1a1a2e;
background: #fff;
line-height: 1.45;
display: flex;
flex-direction: column;
min-height: 100vh;
padding-bottom: 36mm;
}
/* pushes notes + validity down toward the fixed footer */
.main-content-gap {
flex-grow: 1;
}
@page {
size: A4;
margin: 15mm 15mm 15mm 15mm;
}
/* ── HEADER ── */
.header {
display: flex;
justify-content: space-between;
align-items: flex-start;
padding-bottom: 10px;
border-bottom: 2.5px solid #5886c4;
margin-bottom: 14px;
}
.company-block {
display: flex;
flex-direction: column;
justify-content: flex-start;
}
.company-block img.logo {
max-height: 70px;
max-width: 250px;
object-fit: contain;
display: block;
margin-bottom: 5px;
}
.company-block p {
font-size: 10pt;
color: #6b8fc4;
margin-top: 1px;
}
.quotation-meta-block {
text-align: right;
}
.quotation-meta-block .doc-type {
font-size: 14pt;
font-weight: 700;
color: #5886c4;
text-transform: uppercase;
letter-spacing: 1px;
margin-bottom: 4px;
}
.quotation-meta-block .meta-line {
font-size: 8.5pt;
text-align: right;
white-space: nowrap;
line-height: 1.6;
}
.quotation-meta-block .meta-line .meta-label {
color: #7a9cc8;
}
.quotation-meta-block .meta-line .meta-value {
font-weight: 600;
color: #1a1a2e;
}
/* ── CLIENT + ORDER META ── */
.info-row {
display: flex;
align-items: stretch;
gap: 16px;
margin-bottom: 12px;
}
.client-block, .order-block {
border: 1px solid #c2d4ec;
border-radius: 5px;
padding: 6px 10px;
}
.client-block { flex: 65; }
.order-block { flex: 35; }
.block-title {
font-size: 7.5pt;
font-weight: 700;
text-transform: uppercase;
color: #5886c4;
letter-spacing: 0.5px;
margin-bottom: 3px;
border-bottom: 1px solid #dce9f7;
padding-bottom: 2px;
}
.info-row table.fields {
border-collapse: collapse;
width: 100%;
}
.info-row table.fields td {
padding: 1px 0;
vertical-align: top;
}
.info-row table.fields td.lbl {
font-size: 8pt;
color: #7a9cc8;
white-space: nowrap;
padding-right: 8px;
}
.info-row table.fields td.val {
font-size: 8.5pt;
font-weight: 500;
color: #1a1a2e;
}
/* ── TITLE / SUBTITLE ── */
.quotation-title {
margin-bottom: 10px;
}
.quotation-title h2 {
font-size: 13pt;
font-weight: 700;
color: #3a6aad;
}
.quotation-title p {
font-size: 9pt;
color: #555;
margin-top: 2px;
}
/* ── ITEMS TABLE ── */
.items-table {
width: 100%;
border-collapse: collapse;
margin-bottom: 0;
font-size: 8.5pt;
}
.items-table thead tr {
background: #5886c4;
color: #fff;
}
.items-table thead th {
padding: 6px 8px;
text-align: left;
font-weight: 600;
font-size: 8pt;
text-transform: uppercase;
letter-spacing: 0.3px;
}
.items-table thead th.right { text-align: right; }
.items-table thead th.center { text-align: center; }
.items-table tbody tr:nth-child(even) { background: #eef4fc; }
.items-table tbody tr:nth-child(odd) { background: #fff; }
.items-table tbody td {
padding: 5px 8px;
border-bottom: 1px solid #dce9f7;
vertical-align: middle;
}
.items-table tbody td.right { text-align: right; }
.items-table tbody td.center { text-align: center; }
.items-table tbody td.muted { color: #7a9cc8; font-size: 8pt; }
/* Special rows for shipping/install */
.items-table tbody tr.special-row td {
background: #edf3fb;
border-top: 1px solid #c2d4ec;
border-bottom: 1px solid #c2d4ec;
font-style: italic;
color: #3a6aad;
}
.items-table tbody tr.special-spacer td {
height: 6px;
background: #f4f8fd;
border: none;
padding: 0;
}
/* ── BELOW TABLE ROW: VAT notice + totals ── */
.below-table {
display: flex;
justify-content: space-between;
align-items: flex-start;
margin-top: 0;
margin-bottom: 14px;
}
.vat-notice {
flex: 1;
padding-top: 8px;
padding-right: 16px;
}
.vat-notice p {
font-size: 8pt;
font-weight: 700;
color: #3a6aad;
text-transform: uppercase;
letter-spacing: 0.3px;
border-left: 3px solid #5886c4;
padding-left: 7px;
padding-top: 2px;
padding-bottom: 2px;
}
/* ── TOTALS ── */
.totals-table {
width: 280px;
border-collapse: collapse;
font-size: 8.5pt;
flex-shrink: 0;
}
.totals-table td {
padding: 4px 10px;
border-bottom: 1px solid #dce9f7;
}
.totals-table .label { color: #555; text-align: right; }
.totals-table .value { text-align: right; font-weight: 500; min-width: 90px; }
.totals-table .discount-row { color: #c0392b; }
.totals-table .new-subtotal-row td { font-size: 10pt; font-weight: 700; color: #1a1a2e; }
.totals-table .vat-row td { color: #7a9cc8; font-style: italic; }
.totals-table .final-row td {
font-size: 11pt;
font-weight: 700;
color: #3a6aad;
border-top: 2px solid #5886c4;
border-bottom: none;
padding-top: 6px;
padding-bottom: 6px;
}
/* ── COMMENTS ── */
.comments-section {
margin-bottom: 14px;
}
.comments-section .section-title {
font-size: 8pt;
font-weight: 700;
text-transform: uppercase;
color: #5886c4;
letter-spacing: 0.5px;
margin-bottom: 5px;
}
.comments-section ul {
padding-left: 14px;
}
.comments-section li {
font-size: 8.5pt;
color: #333;
margin-bottom: 3px;
line-height: 1.4;
}
/* ── FOOTER (validity line only) ── */
.footer {
border-top: 1px solid #c2d4ec;
padding-top: 7px;
margin-top: 10px;
margin-bottom: 10px;
}
.footer .validity {
font-size: 7.5pt;
font-style: italic;
color: #7a9cc8;
}
/* ── FIXED BOTTOM FOOTER (repeats on every page) ── */
.fixed-footer {
position: fixed;
bottom: 0;
left: 0;
right: 0;
padding: 8px 0 0 0;
border-top: 1.5px solid #5886c4;
display: flex;
align-items: stretch;
gap: 20px;
background: #fff;
}
.footer-block {
width: 30%;
flex-shrink: 0;
}
.footer-block-title {
font-size: 7pt;
font-weight: 700;
text-transform: uppercase;
color: #5886c4;
letter-spacing: 0.4px;
margin-bottom: 4px;
border-bottom: 1px solid #dce9f7;
padding-bottom: 2px;
}
.footer-block dl {
display: grid;
grid-template-columns: max-content 1fr;
gap: 2px 6px;
padding-left: 0;
margin-left: 0;
}
.footer-block dt {
font-size: 7pt;
color: #7a9cc8;
white-space: nowrap;
}
.footer-block dd {
font-size: 7pt;
color: #1a1a2e;
font-weight: 500;
}
.footer-ref {
margin-left: auto;
display: flex;
flex-direction: column;
justify-content: flex-end;
align-items: flex-end;
flex-shrink: 0;
}
.footer-ref .ref-quot {
font-size: 7.5pt;
font-weight: 700;
color: #5886c4;
line-height: 1.4;
}
.footer-ref .ref-page {
font-size: 7pt;
color: #7a9cc8;
line-height: 1.4;
}
.footer-ref .ref-page::after {
content: counter(page) " / " counter(pages);
}
/* ── UTILS ── */
.text-muted { color: #aaa; }
.dash { color: #ccc; }
</style>
</head>
<body>
{# ── Bilingual labels ── #}
{% if lang == 'gr' %}
{% set L_QUOTATION = "ΠΡΟΣΦΟΡΑ" %}
{% set L_NUMBER = "Αριθμός" %}
{% set L_DATE = "Ημερομηνία" %}
{% set L_CLIENT = "ΣΤΟΙΧΕΙΑ ΠΕΛΑΤΗ" %}
{% set L_ORDER_META = "ΣΤΟΙΧΕΙΑ ΠΑΡΑΓΓΕΛΙΑΣ" %}
{% set L_ORDER_TYPE = "Τύπος" %}
{% set L_SHIP_METHOD = "Τρ. Αποστολής" %}
{% set L_SHIP_DATE = "Εκτιμώμενη Παράδοση" %}
{% set L_DESC = "Περιγραφή" %}
{% set L_UNIT_COST = "Τιμή Μον." %}
{% set L_DISC = "Έκπτ." %}
{% set L_QTY = "Ποσ." %}
{% set L_UNIT = "Μον." %}
{% set L_VAT_COL = "Φ.Π.Α." %}
{% set L_TOTAL = "Σύνολο" %}
{% set L_SUBTOTAL = "Υποσύνολο" %}
{% set L_GLOBAL_DISC = quotation.global_discount_label or "Έκπτωση" %}
{% set L_NEW_SUBTOTAL = "Νέο Υποσύνολο" %}
{% set L_VAT = "ΣΥΝΟΛΟ Φ.Π.Α." %}
{% set L_SHIPPING_COST = "Μεταφορικά / Shipping" %}
{% set L_INSTALL_COST = "Εγκατάσταση / Installation" %}
{% set L_EXTRAS = quotation.extras_label or "Άλλα" %}
{% set L_FINAL = "ΣΥΝΟΛΟ ΠΛΗΡΩΤΕΟ" %}
{% set L_COMMENTS = "ΣΗΜΕΙΩΣΕΙΣ" %}
{% set L_VALIDITY = "Η προσφορά ισχύει για 30 ημέρες από την ημερομηνία έκδοσής της." %}
{% set L_ORG = "Φορέας" %}
{% set L_CONTACT = "Επικοινωνία" %}
{% set L_ADDRESS = "Διεύθυνση" %}
{% set L_PHONE = "Τηλέφωνο" %}
{% set L_COMPANY_ADDR = "Ε.Ο. Αντιρρίου Ιωαννίνων 23, Αγρίνιο, 30131" %}
{% set L_CONTACT_INFO = "ΣΤΟΙΧΕΙΑ ΕΠΙΚΟΙΝΩΝΙΑΣ" %}
{% set L_PAYMENT_INFO = "ΣΤΟΙΧΕΙΑ ΠΛΗΡΩΜΗΣ" %}
{% else %}
{% set L_QUOTATION = "QUOTATION" %}
{% set L_NUMBER = "Number" %}
{% set L_DATE = "Date" %}
{% set L_CLIENT = "CLIENT DETAILS" %}
{% set L_ORDER_META = "ORDER DETAILS" %}
{% set L_ORDER_TYPE = "Order Type" %}
{% set L_SHIP_METHOD = "Ship. Method" %}
{% set L_SHIP_DATE = "Est. Delivery" %}
{% set L_DESC = "Description" %}
{% set L_UNIT_COST = "Unit Cost" %}
{% set L_DISC = "Disc." %}
{% set L_QTY = "Qty" %}
{% set L_UNIT = "Unit" %}
{% set L_VAT_COL = "VAT" %}
{% set L_TOTAL = "Total" %}
{% set L_SUBTOTAL = "Subtotal" %}
{% set L_GLOBAL_DISC = quotation.global_discount_label or "Discount" %}
{% set L_NEW_SUBTOTAL = "New Subtotal" %}
{% set L_VAT = "Total VAT" %}
{% set L_SHIPPING_COST = "Shipping / Transport" %}
{% set L_INSTALL_COST = "Installation" %}
{% set L_EXTRAS = quotation.extras_label or "Extras" %}
{% set L_FINAL = "TOTAL DUE" %}
{% set L_COMMENTS = "NOTES" %}
{% set L_VALIDITY = "This quotation is valid for 30 days from the date of issue." %}
{% set L_ORG = "Organization" %}
{% set L_CONTACT = "Contact" %}
{% set L_ADDRESS = "Location" %}
{% set L_PHONE = "Phone" %}
{% set L_COMPANY_ADDR = "E.O. Antirriou Ioanninon 23, Agrinio, 30131, Greece" %}
{% set L_CONTACT_INFO = "CONTACT INFORMATION" %}
{% set L_PAYMENT_INFO = "PAYMENT DETAILS" %}
{% endif %}
{# ── Derived values ── #}
{% set today = quotation.created_at[:10] %}
{# ── Find phone/email contacts + check if primary contact is already phone/email ── #}
{% set ns = namespace(customer_phone='', customer_email='', primary_is_phone=false, primary_is_email=false) %}
{% for contact in customer.contacts %}
{% if contact.type == 'phone' and contact.value %}{% if contact.primary %}{% set ns.customer_phone = contact.value %}{% set ns.primary_is_phone = true %}{% elif not ns.customer_phone %}{% set ns.customer_phone = contact.value %}{% endif %}{% endif %}
{% if contact.type == 'email' and contact.value %}{% if contact.primary %}{% set ns.customer_email = contact.value %}{% set ns.primary_is_email = true %}{% elif not ns.customer_email %}{% set ns.customer_email = contact.value %}{% endif %}{% endif %}
{% endfor %}
{% set customer_phone = ns.customer_phone %}
{% set customer_email = ns.customer_email %}
{% set primary_is_phone = ns.primary_is_phone %}
{% set primary_is_email = ns.primary_is_email %}
<!-- HEADER -->
<div class="header">
<div class="company-block">
<img class="logo" src="./logo.png" alt="BellSystems"/>
<p>{{ L_COMPANY_ADDR }}</p>
</div>
<div class="quotation-meta-block">
<div class="doc-type">{{ L_QUOTATION }}</div>
<div class="meta-line"><span class="meta-label">{{ L_NUMBER }}: </span><span class="meta-value">{{ quotation.quotation_number }}</span></div>
<div class="meta-line"><span class="meta-label">{{ L_DATE }}: </span><span class="meta-value">{{ today }}</span></div>
</div>
</div>
<!-- TITLE / SUBTITLE -->
{# Optional title block: subtitle renders only when a title is also present. #}
{% if quotation.title %}
<div class="quotation-title">
<h2>{{ quotation.title }}</h2>
{% if quotation.subtitle %}<p>{{ quotation.subtitle }}</p>{% endif %}
</div>
{% endif %}
<!-- CLIENT + ORDER META -->
{# Two side-by-side field tables; each row renders only when its value is non-empty.
   `select` with no argument drops falsy entries before joining name/location parts. #}
<div class="info-row">
<div class="client-block">
<div class="block-title">{{ L_CLIENT }}</div>
<table class="fields"><tbody>{% if customer.organization %}<tr><td class="lbl">{{ L_ORG }}</td><td class="val">{{ customer.organization }}</td></tr>{% endif %}{% set name_parts = [customer.title, customer.name, customer.surname] | select | list %}{% if name_parts %}<tr><td class="lbl">{{ L_CONTACT }}</td><td class="val">{{ name_parts | join(' ') }}</td></tr>{% endif %}{% if customer.location %}{% set loc_parts = [customer.location.city, customer.location.region, customer.location.country] | select | list %}{% if loc_parts %}<tr><td class="lbl">{{ L_ADDRESS }}</td><td class="val">{{ loc_parts | join(', ') }}</td></tr>{% endif %}{% endif %}{% if customer_email %}<tr><td class="lbl">Email</td><td class="val">{{ customer_email }}</td></tr>{% endif %}{% if customer_phone %}<tr><td class="lbl">{{ L_PHONE }}</td><td class="val">{{ customer_phone }}</td></tr>{% endif %}</tbody></table>
</div>
<div class="order-block">
<div class="block-title">{{ L_ORDER_META }}</div>
{# Ship-date row always renders: a muted empty cell stands in when no estimate exists. #}
<table class="fields"><tbody>{% if quotation.order_type %}<tr><td class="lbl">{{ L_ORDER_TYPE }}</td><td class="val">{{ quotation.order_type }}</td></tr>{% endif %}{% if quotation.shipping_method %}<tr><td class="lbl">{{ L_SHIP_METHOD }}</td><td class="val">{{ quotation.shipping_method }}</td></tr>{% endif %}{% if quotation.estimated_shipping_date %}<tr><td class="lbl">{{ L_SHIP_DATE }}</td><td class="val">{{ quotation.estimated_shipping_date }}</td></tr>{% else %}<tr><td class="lbl">{{ L_SHIP_DATE }}</td><td class="val text-muted"></td></tr>{% endif %}</tbody></table>
</div>
</div>
<!-- ITEMS TABLE -->
{# One row per quotation line item, then optional shipping/install "special" rows.
   Per-item discount/VAT cells render a dash when the percentage is missing or zero. #}
<table class="items-table">
<thead>
<tr>
<th style="width:38%">{{ L_DESC }}</th>
<th class="right" style="width:11%">{{ L_UNIT_COST }}</th>
<th class="center" style="width:7%">{{ L_DISC }}</th>
<th class="center" style="width:7%">{{ L_QTY }}</th>
<th class="center" style="width:7%">{{ L_UNIT }}</th>
<th class="center" style="width:6%">{{ L_VAT_COL }}</th>
<th class="right" style="width:12%">{{ L_TOTAL }}</th>
</tr>
</thead>
<tbody>
{% for item in quotation.items %}
<tr>
<td>{{ item.description or '' }}</td>
<td class="right">{{ item.unit_cost | format_money }}</td>
<td class="center">
{% if item.discount_percent and item.discount_percent > 0 %}
{{ item.discount_percent | int }}%
{% else %}
<span class="dash"></span>
{% endif %}
</td>
{# Render quantity without a fractional part when it is a whole number (3, not 3.0). #}
<td class="center">{{ item.quantity | int if item.quantity == (item.quantity | int) else item.quantity }}</td>
<td class="center muted">{{ item.unit_type }}</td>
<td class="center">
{% if item.vat_percent and item.vat_percent > 0 %}
{{ item.vat_percent | int }}%
{% else %}
<span class="dash"></span>
{% endif %}
</td>
<td class="right">{{ item.line_total | format_money }}</td>
</tr>
{% endfor %}
{% if quotation.items | length == 0 %}
<tr>
<td colspan="7" class="text-muted" style="text-align:center; padding: 12px;"></td>
</tr>
{% endif %}
{# ── Shipping / Install as special rows ── #}
{% set has_special = (quotation.shipping_cost and quotation.shipping_cost > 0) or (quotation.install_cost and quotation.install_cost > 0) %}
{% if has_special %}
<tr class="special-spacer"><td colspan="7"></td></tr>
{% endif %}
{% if quotation.shipping_cost and quotation.shipping_cost > 0 %}
{# FIX: the discount may be missing/None (the label below guards for exactly that),
   so `or 0` keeps the arithmetic from raising at render time. #}
{% set ship_net = quotation.shipping_cost * (1 - (quotation.shipping_cost_discount or 0) / 100) %}
<tr class="special-row">
<td>{{ L_SHIPPING_COST }}{% if quotation.shipping_cost_discount and quotation.shipping_cost_discount > 0 %} <span style="font-size:7.5pt; color:#7a9cc8;">(-{{ quotation.shipping_cost_discount | int }}%)</span>{% endif %}</td>
<td class="right">{{ quotation.shipping_cost | format_money }}</td>
<td class="center"><span class="dash"></span></td>
<td class="center">1</td>
<td class="center muted"></td>
<td class="center"><span class="dash"></span></td>
<td class="right">{{ ship_net | format_money }}</td>
</tr>
{% endif %}
{% if quotation.install_cost and quotation.install_cost > 0 %}
{# FIX: same missing-discount guard as shipping above. #}
{% set install_net = quotation.install_cost * (1 - (quotation.install_cost_discount or 0) / 100) %}
<tr class="special-row">
<td>{{ L_INSTALL_COST }}{% if quotation.install_cost_discount and quotation.install_cost_discount > 0 %} <span style="font-size:7.5pt; color:#7a9cc8;">(-{{ quotation.install_cost_discount | int }}%)</span>{% endif %}</td>
<td class="right">{{ quotation.install_cost | format_money }}</td>
<td class="center"><span class="dash"></span></td>
<td class="center">1</td>
<td class="center muted"></td>
<td class="center"><span class="dash"></span></td>
<td class="right">{{ install_net | format_money }}</td>
</tr>
{% endif %}
</tbody>
</table>
<!-- TOTALS + VAT NOTICE -->
{# Totals ladder: subtotal → optional global discount + new subtotal → VAT → optional extras → final.
   All amounts arrive pre-computed from the backend; the template only formats them. #}
<div class="below-table">
{# NOTE(review): vat-notice is emitted empty — presumably a placeholder for a VAT/exemption note filled by CSS or a future field; confirm before removing. #}
<div class="vat-notice">
</div>
<table class="totals-table">
<tr>
<td class="label">{{ L_SUBTOTAL }}</td>
<td class="value">{{ quotation.subtotal_before_discount | format_money }}</td>
</tr>
{% if quotation.global_discount_percent and quotation.global_discount_percent > 0 %}
<tr class="discount-row">
<td class="label">{{ L_GLOBAL_DISC }} ({{ quotation.global_discount_percent | int }}%)</td>
<td class="value">- {{ quotation.global_discount_amount | format_money }}</td>
</tr>
<tr class="new-subtotal-row">
<td class="label">{{ L_NEW_SUBTOTAL }}</td>
<td class="value">{{ quotation.new_subtotal | format_money }}</td>
</tr>
{% endif %}
<tr class="vat-row">
<td class="label">{{ L_VAT }}</td>
<td class="value">{{ quotation.vat_amount | format_money }}</td>
</tr>
{% if quotation.extras_cost and quotation.extras_cost > 0 %}
<tr>
<td class="label">{{ L_EXTRAS }}</td>
<td class="value">{{ quotation.extras_cost | format_money }}</td>
</tr>
{% endif %}
<tr class="final-row">
<td class="label">{{ L_FINAL }}</td>
<td class="value">{{ quotation.final_total | format_money }}</td>
</tr>
</table>
</div>
<!-- SPACER: flexible gap between totals and notes -->
<div class="main-content-gap"></div>
<!-- COMMENTS / NOTES -->
{# Comments section renders only when at least one enabled quick note or one
   non-blank free-text comment exists. Quick notes are pre-canned bilingual
   sentences (Greek when lang == 'gr', English otherwise) parameterized by
   the values stored in quotation.quick_notes. #}
{% set qn = quotation.quick_notes or {} %}
{% set has_quick = (qn.payment_advance and qn.payment_advance.enabled) or (qn.lead_time and qn.lead_time.enabled) or (qn.backup_relays and qn.backup_relays.enabled) %}
{% set has_comments = quotation.comments and quotation.comments | length > 0 %}
{% if has_quick or has_comments %}
<div class="comments-section">
<div class="section-title">{{ L_COMMENTS }}</div>
<ul>
{# ── Quick Notes ── #}
{# Payment Advance: advance-payment percentage required on confirmation. #}
{% if qn.payment_advance and qn.payment_advance.enabled %}
{% set pct = qn.payment_advance.percent | string %}
{% if lang == 'gr' %}
<li>Απαιτείται προκαταβολή <strong>{{ pct }}%</strong> με την επιβεβαίωση της παραγγελίας.</li>
{% else %}
<li><strong>{{ pct }}%</strong> advance payment is required upon order confirmation.</li>
{% endif %}
{% endif %}
{# Lead Time: estimated delivery in working days. #}
{% if qn.lead_time and qn.lead_time.enabled %}
{% set days = qn.lead_time.days | string %}
{% if lang == 'gr' %}
<li>Εκτιμώμενος χρόνος παράδοσης, <strong>{{ days }} εργάσιμες ημέρες</strong> από την επιβεβαίωση της παραγγελίας και παραλαβή της προκαταβολής.</li>
{% else %}
<li>Estimated delivery time is <strong>{{ days }} working days</strong> from order confirmation and receipt of advance payment.</li>
{% endif %}
{% endif %}
{# Backup Relays: singular/plural wording chosen per language for the relay count. #}
{% if qn.backup_relays and qn.backup_relays.enabled %}
{% set n = qn.backup_relays.count | int %}
{% if lang == 'gr' %}
{% if n == 1 %}
<li>Συμπεριλαμβάνονται: <strong>{{ n }} έξτρα Εφεδρικό Ρελέ Ισχύος</strong></li>
{% else %}
<li>Συμπεριλαμβάνονται: <strong>{{ n }} έξτρα Εφεδρικά Ρελέ Ισχύος</strong></li>
{% endif %}
{% else %}
{% if n == 1 %}
<li><strong>{{ n }} Extra Relay</strong> included as Backup, free of charge.</li>
{% else %}
<li><strong>{{ n }} Extra Relays</strong> included as Backups, free of charge.</li>
{% endif %}
{% endif %}
{% endif %}
{# ── Dynamic comments ── #}
{# Free-text comments; whitespace-only entries are skipped. #}
{% if has_comments %}
{% for comment in quotation.comments %}
{% if comment and comment.strip() %}
<li>{{ comment }}</li>
{% endif %}
{% endfor %}
{% endif %}
</ul>
</div>
{% endif %}
<!-- VALIDITY -->
{# In-flow footer: the localized quotation-validity sentence. #}
<div class="footer">
<span class="validity">{{ L_VALIDITY }}</span>
</div>
<!-- FIXED BOTTOM FOOTER: contact + payment (repeats every page) -->
{# Page-repeating footer with fixed company contact and bank details.
   Labels are localized inline; the values themselves are hard-coded. #}
<div class="fixed-footer">
<div class="footer-block">
<div class="footer-block-title">{{ L_CONTACT_INFO }}</div>
<dl>
<dt>{% if lang == 'gr' %}Εταιρεία{% else %}Company{% endif %}</dt>
<dd>BellSystems</dd>
<dt>{% if lang == 'gr' %}Τηλ.{% else %}Phone{% endif %}</dt>
<dd>+(30) 26410 33344</dd>
{# FIX: "Email" was wrapped in a conditional with identical branches — no localization needed. #}
<dt>Email</dt>
<dd>sales@bellsystems.gr</dd>
<dt>Web</dt>
<dd>www.bellsystems.gr</dd>
<dt>Links</dt>
<dd><img src="./linktree.png" alt="linktr.ee/bellsystems" style="height: 7pt; vertical-align: middle;"/></dd>
</dl>
</div>
<div class="footer-block">
<div class="footer-block-title">{{ L_PAYMENT_INFO }}</div>
<dl>
<dt>{% if lang == 'gr' %}Τράπεζα{% else %}Bank{% endif %}</dt>
<dd>Piraeus Bank</dd>
<dt>{% if lang == 'gr' %}Δικαιούχος{% else %}Holder{% endif %}</dt>
<dd>Pontikas Georgios</dd>
<dt>{% if lang == 'gr' %}Αριθμός{% else %}Account No.{% endif %}</dt>
<dd>6264-1484-35226</dd>
<dt>IBAN</dt>
<dd>GR8101712640006264148435226</dd>
<dt>BIC/SWIFT</dt>
<dd>PIRBGRAA</dd>
</dl>
</div>
<div class="footer-ref">
<span class="ref-quot">{{ quotation.quotation_number }}</span>
{# NOTE(review): the page label ends mid-sentence — the page number is presumably appended by a CSS counter in the stylesheet; confirm. #}
<span class="ref-page">{% if lang == 'gr' %}Σελίδα {% else %}Page {% endif %}</span>
</div>
</div>
</body>
</html>

View File

@@ -181,7 +181,7 @@ def generate(serial_number: str, hw_type: str, hw_version: str) -> bytes:
"""Generate a 0x5000-byte NVS partition binary for a Vesper device.
serial_number: full SN string e.g. 'PV-26B27-VS01R-X7KQA'
hw_type: lowercase board type e.g. 'vs', 'vp', 'vx'
hw_type: board type e.g. 'vesper', 'vesper_plus', 'vesper_pro'
hw_version: zero-padded version e.g. '01'
Returns raw bytes ready to flash at 0x9000.