update: Major Overhaul to all subsystems
This commit is contained in:
0
backend/crm/__init__.py
Normal file
0
backend/crm/__init__.py
Normal file
417
backend/crm/comms_router.py
Normal file
417
backend/crm/comms_router.py
Normal file
@@ -0,0 +1,417 @@
|
||||
import base64
|
||||
import json
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Form, File, UploadFile
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Optional
|
||||
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from config import settings
|
||||
from crm.models import CommCreate, CommUpdate, CommInDB, CommListResponse, MediaCreate, MediaDirection
|
||||
from crm import service
|
||||
from crm import email_sync
|
||||
from crm.mail_accounts import get_mail_accounts
|
||||
|
||||
router = APIRouter(prefix="/api/crm/comms", tags=["crm-comms"])
|
||||
|
||||
|
||||
class EmailSendResponse(BaseModel):
    """Response for POST /email/send: the entry dict returned by the mailer."""
    # Raw comm-log entry as produced by crm.email_sync.send_email.
    entry: dict
|
||||
|
||||
|
||||
class EmailSyncResponse(BaseModel):
    """Response for POST /email/sync: how many new emails were stored."""
    # Count of newly inserted email rows from the sync run.
    new_count: int
|
||||
|
||||
|
||||
class MailListResponse(BaseModel):
    """Email listing payload for the Mail page."""
    # List of email comm entries (dict-shaped; schema defined by the service layer).
    entries: list
    # Number of entries returned (len(entries), not a server-side grand total).
    total: int
|
||||
|
||||
|
||||
@router.get("/all", response_model=CommListResponse)
async def list_all_comms(
    type: Optional[str] = Query(None),
    direction: Optional[str] = Query(None),
    limit: int = Query(200, le=500),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List comms across all customers, optionally filtered by type/direction."""
    results = await service.list_all_comms(type=type, direction=direction, limit=limit)
    return CommListResponse(entries=results, total=len(results))
|
||||
|
||||
|
||||
@router.get("", response_model=CommListResponse)
async def list_comms(
    customer_id: str = Query(...),
    type: Optional[str] = Query(None),
    direction: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List comms for one customer, optionally filtered by type/direction."""
    results = await service.list_comms(
        customer_id=customer_id, type=type, direction=direction
    )
    return CommListResponse(entries=results, total=len(results))
|
||||
|
||||
|
||||
@router.post("", response_model=CommInDB, status_code=201)
async def create_comm(
    body: CommCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a new comm-log entry from the request body."""
    created = await service.create_comm(body)
    return created
|
||||
|
||||
|
||||
@router.get("/email/all", response_model=MailListResponse)
async def list_all_emails(
    direction: Optional[str] = Query(None),
    customers_only: bool = Query(False),
    mailbox: Optional[str] = Query(None, description="sales|support|both|all or account key"),
    limit: int = Query(500, le=1000),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Return all email comms (all senders + unmatched), for the Mail page."""
    # "all"/"both"/no mailbox → no account filter; the "sales"/"support"
    # shorthands map to themselves, anything else is a raw account key.
    selected_accounts = None
    if mailbox and mailbox not in {"all", "both"}:
        shorthand = {"sales": ["sales"], "support": ["support"]}
        selected_accounts = shorthand.get(mailbox, [mailbox])
    results = await service.list_all_emails(
        direction=direction,
        customers_only=customers_only,
        mail_accounts=selected_accounts,
        limit=limit,
    )
    return MailListResponse(entries=results, total=len(results))
|
||||
|
||||
|
||||
@router.get("/email/accounts")
async def list_mail_accounts(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Expose the configured mail accounts (public fields only, no credentials)."""
    visible = []
    for acc in get_mail_accounts():
        visible.append({
            "key": acc["key"],
            "label": acc["label"],
            "email": acc["email"],
            "sync_inbound": bool(acc.get("sync_inbound")),
            "allow_send": bool(acc.get("allow_send")),
        })
    return {"accounts": visible}
|
||||
|
||||
|
||||
@router.get("/email/check")
async def check_new_emails(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Lightweight check: returns how many emails are on the server vs. stored locally."""
    # Delegates entirely to the sync module; nothing is downloaded or stored.
    return await email_sync.check_new_emails()
|
||||
|
||||
|
||||
# Email endpoints — must be before /{comm_id} wildcard routes
|
||||
@router.post("/email/send", response_model=EmailSendResponse)
async def send_email_endpoint(
    customer_id: Optional[str] = Form(None),
    from_account: Optional[str] = Form(None),
    to: str = Form(...),
    subject: str = Form(...),
    body: str = Form(...),
    body_html: str = Form(""),
    cc: str = Form("[]"),  # JSON-encoded list of strings
    files: List[UploadFile] = File(default=[]),
    user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Send an email (multipart form with optional file attachments) and log it.

    Raises:
        HTTPException 503: no mail account is configured.
        HTTPException 400: the mailer rejected the request (RuntimeError).
    """
    if not get_mail_accounts():
        raise HTTPException(status_code=503, detail="SMTP not configured")
    # cc arrives as a JSON-encoded list; a malformed or non-list payload is
    # treated as "no CC" rather than failing the whole send.
    try:
        cc_list: List[str] = json.loads(cc) if cc else []
    except (ValueError, TypeError):
        cc_list = []
    if not isinstance(cc_list, list):
        cc_list = []

    # Read all uploaded files into memory as (filename, bytes, mime_type) tuples.
    file_attachments = []
    for f in files:
        content = await f.read()
        mime_type = f.content_type or "application/octet-stream"
        file_attachments.append((f.filename, content, mime_type))

    from crm.email_sync import send_email
    try:
        entry = await send_email(
            customer_id=customer_id or None,
            from_account=from_account,
            to=to,
            subject=subject,
            body=body,
            body_html=body_html,
            cc=cc_list,
            sent_by=user.name or user.sub,
            file_attachments=file_attachments if file_attachments else None,
        )
    except RuntimeError as e:
        # Surface mailer configuration/validation errors as client errors.
        raise HTTPException(status_code=400, detail=str(e)) from e
    return EmailSendResponse(entry=entry)
|
||||
|
||||
|
||||
@router.post("/email/sync", response_model=EmailSyncResponse)
async def sync_email_endpoint(
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Run a full IMAP sync and report how many new emails were stored."""
    if not get_mail_accounts():
        raise HTTPException(status_code=503, detail="IMAP not configured")
    from crm.email_sync import sync_emails
    stored = await sync_emails()
    return EmailSyncResponse(new_count=stored)
|
||||
|
||||
|
||||
class SaveInlineRequest(BaseModel):
    """Request body for saving an inline email image to Nextcloud."""
    # data: URI (or bare base64 string) of the image extracted from the email HTML.
    data_uri: str
    # Filename to record for the media entry.
    filename: str
    # Subfolder under the customer's Nextcloud folder.
    subfolder: str = "received_media"
    # Optional explicit MIME type; otherwise derived from the data URI header.
    mime_type: Optional[str] = None
|
||||
|
||||
|
||||
async def _resolve_customer_folder(customer_id: str) -> str:
    """Return the Nextcloud folder_id for a customer (falls back to customer_id)."""
    from shared.firebase import get_db as get_firestore
    snapshot = get_firestore().collection("crm_customers").document(customer_id).get()
    if not snapshot.exists:
        raise HTTPException(status_code=404, detail="Customer not found")
    record = snapshot.to_dict()
    return record.get("folder_id") or customer_id
|
||||
|
||||
|
||||
async def _upload_to_nc(folder_id: str, subfolder: str, filename: str,
                        content: bytes, mime_type: str, customer_id: str,
                        uploaded_by: str, tags: list[str]) -> dict:
    """Upload bytes into a customer's Nextcloud subfolder and record a media entry.

    Returns {"ok": True, "media_id": ..., "nextcloud_path": ...}.
    """
    from crm import nextcloud
    target_folder = f"customers/{folder_id}/{subfolder}"
    # BUG FIX: the upload path must include the file's own name; previously the
    # filename component was missing, so the path did not point at the file
    # even though the media record stored `filename`.
    file_path = f"{target_folder}/{filename}"
    await nextcloud.ensure_folder(target_folder)
    await nextcloud.upload_file(file_path, content, mime_type)
    media = await service.create_media(MediaCreate(
        customer_id=customer_id,
        filename=filename,
        nextcloud_path=file_path,
        mime_type=mime_type,
        direction=MediaDirection.received,
        tags=tags,
        uploaded_by=uploaded_by,
    ))
    return {"ok": True, "media_id": media.id, "nextcloud_path": file_path}
|
||||
|
||||
|
||||
@router.post("/email/{comm_id}/save-inline")
async def save_email_inline_image(
    comm_id: str,
    body: SaveInlineRequest,
    user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Save an inline image (data-URI from email HTML body) to Nextcloud."""
    comm = await service.get_comm(comm_id)
    customer_id = comm.customer_id
    if not customer_id:
        raise HTTPException(status_code=400, detail="This email is not linked to a customer")

    folder_id = await _resolve_customer_folder(customer_id)

    # Split "data:<mime>;base64,<payload>"; tolerate a bare base64 string too.
    uri = body.data_uri
    mime_type = body.mime_type or "image/png"
    if "," in uri:
        header, encoded = uri.split(",", 1)
        try:
            mime_type = header.split(":")[1].split(";")[0]
        except Exception:
            pass  # keep the default/explicit MIME type on a malformed header
    else:
        encoded = uri
    try:
        content = base64.b64decode(encoded)
    except Exception:
        raise HTTPException(status_code=400, detail="Invalid base64 data")

    return await _upload_to_nc(
        folder_id, body.subfolder, body.filename,
        content, mime_type, customer_id,
        user.name or user.sub, ["email-inline-image"],
    )
|
||||
|
||||
|
||||
@router.post("/email/{comm_id}/save-attachment/{attachment_index}")
async def save_email_attachment(
    comm_id: str,
    attachment_index: int,
    filename: str = Form(...),
    subfolder: str = Form("received_media"),
    user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Re-fetch a specific attachment from IMAP (by index in the email's attachment list)
    and save it to the customer's Nextcloud media folder.

    Raises 400 when the comm has no customer/message-id/account or the index
    is out of range, 404 when the message or attachment is gone from the
    server, 502 on any other IMAP failure.
    """
    import asyncio
    comm = await service.get_comm(comm_id)
    customer_id = comm.customer_id
    if not customer_id:
        raise HTTPException(status_code=400, detail="This email is not linked to a customer")

    # The Message-ID header is the only handle we keep for re-fetching.
    ext_message_id = comm.ext_message_id
    if not ext_message_id:
        raise HTTPException(status_code=400, detail="No message ID stored for this email")

    # Stored metadata only (filename/content_type/size); content is never kept locally.
    attachments_meta = comm.attachments or []
    if attachment_index < 0 or attachment_index >= len(attachments_meta):
        raise HTTPException(status_code=400, detail="Attachment index out of range")

    att_meta = attachments_meta[attachment_index]
    mime_type = att_meta.content_type or "application/octet-stream"
    from crm.mail_accounts import account_by_key, account_by_email
    # Prefer the recorded account key; fall back to matching the sender address.
    account = account_by_key(comm.mail_account) or account_by_email(comm.from_addr)
    if not account:
        raise HTTPException(status_code=400, detail="Email account config not found for this message")

    # Re-fetch from IMAP in executor
    def _fetch_attachment() -> bytes:
        # Blocking imaplib work — must run off the event loop.
        import imaplib, email as _email
        if account.get("imap_use_ssl"):
            imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
        else:
            imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))

        # Search by Message-ID header
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        uids = data[0].split() if data[0] else []
        if not uids:
            raise ValueError(f"Message not found on IMAP server: {ext_message_id}")

        _, msg_data = imap.fetch(uids[0], "(RFC822)")
        raw = msg_data[0][1]
        msg = _email.message_from_bytes(raw)
        imap.logout()

        # Walk attachments in order — find the one at attachment_index
        # NOTE(review): assumes Message.walk() yields attachment parts in the
        # same order as when the metadata list was built — confirm against
        # the sync-side extraction.
        found_idx = 0
        for part in msg.walk():
            cd = str(part.get("Content-Disposition", ""))
            if "attachment" not in cd:
                continue
            if found_idx == attachment_index:
                payload = part.get_payload(decode=True)
                if payload is None:
                    raise ValueError("Attachment payload is empty")
                return payload
            found_idx += 1

        raise ValueError(f"Attachment index {attachment_index} not found in message")

    loop = asyncio.get_event_loop()
    try:
        content = await loop.run_in_executor(None, _fetch_attachment)
    except ValueError as e:
        # ValueError is raised for "not found" conditions above → 404.
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=502, detail=f"IMAP fetch failed: {e}")

    folder_id = await _resolve_customer_folder(customer_id)
    return await _upload_to_nc(
        folder_id, subfolder, filename,
        content, mime_type, customer_id,
        user.name or user.sub, ["email-attachment"],
    )
|
||||
|
||||
|
||||
class BulkDeleteRequest(BaseModel):
    """Request body for POST /bulk-delete."""
    # Comm-log entry ids to delete.
    ids: List[str]
|
||||
|
||||
|
||||
class ToggleImportantRequest(BaseModel):
    """Request body for PATCH /{comm_id}/important."""
    # Desired important flag state.
    important: bool
|
||||
|
||||
|
||||
class ToggleReadRequest(BaseModel):
    """Request body for PATCH /{comm_id}/read."""
    # Desired read flag state.
    read: bool
|
||||
|
||||
|
||||
@router.post("/bulk-delete", status_code=200)
async def bulk_delete_comms(
    body: BulkDeleteRequest,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete many comms; returns {"deleted": <count actually removed locally>}."""
    # Try remote IMAP delete for email rows first (best-effort), then local delete.
    for comm_id in body.ids:
        try:
            entry = await service.get_comm(comm_id)
            if entry.type == "email" and entry.ext_message_id:
                await email_sync.delete_remote_email(
                    entry.ext_message_id,
                    entry.mail_account,
                    entry.from_addr,
                )
        except Exception:
            # Keep delete resilient; local delete still proceeds.
            pass
    removed = await service.delete_comms_bulk(body.ids)
    return {"deleted": removed}
|
||||
|
||||
|
||||
@router.patch("/{comm_id}/important", response_model=CommInDB)
async def set_comm_important(
    comm_id: str,
    body: ToggleImportantRequest,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Flag or unflag a comm as important."""
    updated = await service.set_comm_important(comm_id, body.important)
    return updated
|
||||
|
||||
|
||||
@router.patch("/{comm_id}/read", response_model=CommInDB)
async def set_comm_read(
    comm_id: str,
    body: ToggleReadRequest,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Set the read flag locally; mirror it to IMAP (best-effort) for synced emails."""
    try:
        entry = await service.get_comm(comm_id)
        if entry.type == "email" and entry.ext_message_id:
            await email_sync.set_remote_read(
                entry.ext_message_id,
                entry.mail_account,
                entry.from_addr,
                body.read,
            )
    except Exception:
        # Remote mirror is best-effort; the local flag is authoritative.
        pass
    return await service.set_comm_read(comm_id, body.read)
|
||||
|
||||
|
||||
@router.put("/{comm_id}", response_model=CommInDB)
async def update_comm(
    comm_id: str,
    body: CommUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Update an existing comm-log entry."""
    updated = await service.update_comm(comm_id, body)
    return updated
|
||||
|
||||
|
||||
@router.delete("/{comm_id}", status_code=204)
async def delete_comm(
    comm_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete one comm; for synced emails, also try to remove it from IMAP."""
    try:
        entry = await service.get_comm(comm_id)
        if entry.type == "email" and entry.ext_message_id:
            await email_sync.delete_remote_email(
                entry.ext_message_id,
                entry.mail_account,
                entry.from_addr,
            )
    except Exception:
        # Remote removal is best-effort; local delete always proceeds.
        pass
    await service.delete_comm(comm_id)
|
||||
71
backend/crm/customers_router.py
Normal file
71
backend/crm/customers_router.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, Query, BackgroundTasks
|
||||
from typing import Optional
|
||||
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from crm.models import CustomerCreate, CustomerUpdate, CustomerInDB, CustomerListResponse
|
||||
from crm import service, nextcloud
|
||||
from config import settings
|
||||
|
||||
router = APIRouter(prefix="/api/crm/customers", tags=["crm-customers"])
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("", response_model=CustomerListResponse)
def list_customers(
    search: Optional[str] = Query(None),
    tag: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List customers, optionally filtered by search text and/or tag."""
    results = service.list_customers(search=search, tag=tag)
    return CustomerListResponse(customers=results, total=len(results))
|
||||
|
||||
|
||||
@router.get("/{customer_id}", response_model=CustomerInDB)
def get_customer(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch one customer by id."""
    customer = service.get_customer(customer_id)
    return customer
|
||||
|
||||
|
||||
@router.post("", response_model=CustomerInDB, status_code=201)
async def create_customer(
    body: CustomerCreate,
    background_tasks: BackgroundTasks,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a customer; Nextcloud folder scaffolding runs as a background task."""
    created = service.create_customer(body)
    # Only schedule folder creation when Nextcloud is actually configured.
    if settings.nextcloud_url:
        background_tasks.add_task(_init_nextcloud_folder, created)
    return created
|
||||
|
||||
|
||||
async def _init_nextcloud_folder(customer) -> None:
    """Best-effort creation of the customer's Nextcloud folder tree and info file."""
    try:
        base = f"customers/{service.get_customer_nc_path(customer)}"
        for sub in ("media", "documents", "sent", "received"):
            await nextcloud.ensure_folder(f"{base}/{sub}")
        await nextcloud.write_info_file(base, customer.name, customer.id)
    except Exception as e:
        # Runs in the background — log and swallow so customer creation never fails.
        logger.warning("Nextcloud folder init failed for customer %s: %s", customer.id, e)
|
||||
|
||||
|
||||
@router.put("/{customer_id}", response_model=CustomerInDB)
def update_customer(
    customer_id: str,
    body: CustomerUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Update an existing customer record."""
    updated = service.update_customer(customer_id, body)
    return updated
|
||||
|
||||
|
||||
@router.delete("/{customer_id}", status_code=204)
def delete_customer(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a customer record."""
    service.delete_customer(customer_id)
|
||||
837
backend/crm/email_sync.py
Normal file
837
backend/crm/email_sync.py
Normal file
@@ -0,0 +1,837 @@
|
||||
"""
|
||||
IMAP email sync and SMTP email send for CRM.
|
||||
Uses only stdlib imaplib/smtplib — no extra dependencies.
|
||||
Sync is run in an executor to avoid blocking the event loop.
|
||||
"""
|
||||
import asyncio
|
||||
import base64
|
||||
import email
|
||||
import email.header
|
||||
import email.utils
|
||||
import html.parser
|
||||
import imaplib
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import smtplib
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from email.mime.base import MIMEBase
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from email import encoders
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from config import settings
|
||||
from mqtt import database as mqtt_db
|
||||
from crm.mail_accounts import get_mail_accounts, account_by_key, account_by_email
|
||||
|
||||
logger = logging.getLogger("crm.email_sync")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _decode_header(raw: str) -> str:
|
||||
"""Decode an RFC2047-encoded email header value."""
|
||||
if not raw:
|
||||
return ""
|
||||
parts = email.header.decode_header(raw)
|
||||
decoded = []
|
||||
for part, enc in parts:
|
||||
if isinstance(part, bytes):
|
||||
decoded.append(part.decode(enc or "utf-8", errors="replace"))
|
||||
else:
|
||||
decoded.append(part)
|
||||
return " ".join(decoded)
|
||||
|
||||
|
||||
class _HTMLStripper(html.parser.HTMLParser):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._text = []
|
||||
|
||||
def handle_data(self, data):
|
||||
self._text.append(data)
|
||||
|
||||
def get_text(self):
|
||||
return " ".join(self._text)
|
||||
|
||||
|
||||
def _strip_html(html_str: str) -> str:
    """Return the text content of *html_str* with all tags removed."""
    stripper = _HTMLStripper()
    stripper.feed(html_str)
    return stripper.get_text()
|
||||
|
||||
|
||||
def _extract_inline_data_images(html_body: str) -> tuple[str, list[tuple[str, bytes, str]]]:
|
||||
"""Replace data-URI images in HTML with cid: references and return inline parts.
|
||||
Returns: (new_html, [(cid, image_bytes, mime_type), ...])
|
||||
"""
|
||||
if not html_body:
|
||||
return "", []
|
||||
|
||||
inline_parts: list[tuple[str, bytes, str]] = []
|
||||
seen: dict[str, str] = {} # data-uri -> cid
|
||||
|
||||
src_pattern = re.compile(r"""src=(['"])(data:image/[^'"]+)\1""", re.IGNORECASE)
|
||||
data_pattern = re.compile(r"^data:(image/[a-zA-Z0-9.+-]+);base64,(.+)$", re.IGNORECASE | re.DOTALL)
|
||||
|
||||
def _replace(match: re.Match) -> str:
|
||||
quote = match.group(1)
|
||||
data_uri = match.group(2)
|
||||
|
||||
if data_uri in seen:
|
||||
cid = seen[data_uri]
|
||||
return f"src={quote}cid:{cid}{quote}"
|
||||
|
||||
parsed = data_pattern.match(data_uri)
|
||||
if not parsed:
|
||||
return match.group(0)
|
||||
|
||||
mime_type = parsed.group(1).lower()
|
||||
b64_data = parsed.group(2).strip()
|
||||
try:
|
||||
payload = base64.b64decode(b64_data, validate=False)
|
||||
except Exception:
|
||||
return match.group(0)
|
||||
|
||||
cid = f"inline-{uuid.uuid4().hex}"
|
||||
seen[data_uri] = cid
|
||||
inline_parts.append((cid, payload, mime_type))
|
||||
return f"src={quote}cid:{cid}{quote}"
|
||||
|
||||
return src_pattern.sub(_replace, html_body), inline_parts
|
||||
|
||||
|
||||
def _load_customer_email_map() -> dict[str, str]:
    """Build a lookup of customer email -> customer_id from Firestore."""
    from shared.firebase import get_db as get_firestore
    lookup: dict[str, str] = {}
    for doc in get_firestore().collection("crm_customers").stream():
        data = doc.to_dict() or {}
        for contact in data.get("contacts") or []:
            # Only email-type contacts with a non-empty value participate.
            if contact.get("type") == "email" and contact.get("value"):
                lookup[str(contact["value"]).strip().lower()] = doc.id
    return lookup
|
||||
|
||||
|
||||
def _get_body(msg: email.message.Message) -> tuple[str, str]:
    """Extract (plain_text, html_body) from an email message.

    Inline images (cid: references) are substituted with data-URIs so they
    render correctly in a sandboxed iframe without external requests.
    Falls back to a tag-stripped HTML body when no text/plain part exists.
    """
    import base64 as _b64
    plain = None
    html_body = None
    # Map Content-ID → data-URI for inline images
    cid_map: dict[str, str] = {}

    if msg.is_multipart():
        for part in msg.walk():
            ct = part.get_content_type()
            cd = str(part.get("Content-Disposition", ""))
            # Content-ID values arrive wrapped in angle brackets: <abc@host>.
            cid = part.get("Content-ID", "").strip().strip("<>")

            # Real attachments are handled elsewhere (_get_attachments).
            if "attachment" in cd:
                continue

            # First text/plain and first text/html parts win; later ones ignored.
            if ct == "text/plain" and plain is None:
                raw = part.get_payload(decode=True)
                charset = part.get_content_charset() or "utf-8"
                plain = raw.decode(charset, errors="replace")
            elif ct == "text/html" and html_body is None:
                raw = part.get_payload(decode=True)
                charset = part.get_content_charset() or "utf-8"
                html_body = raw.decode(charset, errors="replace")
            elif ct.startswith("image/") and cid:
                raw = part.get_payload(decode=True)
                if raw:
                    b64 = _b64.b64encode(raw).decode("ascii")
                    cid_map[cid] = f"data:{ct};base64,{b64}"
    else:
        # Single-part message: the whole payload is the body.
        ct = msg.get_content_type()
        payload = msg.get_payload(decode=True)
        charset = msg.get_content_charset() or "utf-8"
        if payload:
            text = payload.decode(charset, errors="replace")
            if ct == "text/plain":
                plain = text
            elif ct == "text/html":
                html_body = text

    # Substitute cid: references with data-URIs
    if html_body and cid_map:
        for cid, data_uri in cid_map.items():
            html_body = html_body.replace(f"cid:{cid}", data_uri)

    # Prefer real plain text; otherwise derive it by stripping the HTML.
    plain_text = (plain or (html_body and _strip_html(html_body)) or "").strip()
    return plain_text, (html_body or "").strip()
|
||||
|
||||
|
||||
def _get_attachments(msg: email.message.Message) -> list[dict]:
    """Extract attachment info (filename, content_type, size) without storing content."""
    found: list[dict] = []
    if not msg.is_multipart():
        return found
    for part in msg.walk():
        disposition = str(part.get("Content-Disposition", ""))
        if "attachment" not in disposition:
            continue
        name = _decode_header(part.get_filename() or "attachment")
        payload = part.get_payload(decode=True)
        found.append({
            "filename": name,
            "content_type": part.get_content_type() or "application/octet-stream",
            "size": len(payload) if payload else 0,
        })
    return found
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# IMAP sync (synchronous — called via run_in_executor)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _sync_account_emails_sync(account: dict) -> tuple[list[dict], bool]:
    """Fetch and parse every message in one account's inbox (blocking IMAP work).

    Returns (messages, complete): `messages` is a list of parsed message dicts,
    `complete` is False when the account is unconfigured or at least one
    message failed to parse and was skipped.
    """
    # Missing connection settings → nothing fetched, flagged incomplete.
    if not account.get("imap_host") or not account.get("imap_username") or not account.get("imap_password"):
        return [], False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
    imap.login(account["imap_username"], account["imap_password"])
    # readonly=True prevents marking messages as \Seen while syncing.
    imap.select(account.get("imap_inbox", "INBOX"), readonly=True)
    _, data = imap.search(None, "ALL")
    uids = data[0].split() if data[0] else []

    results = []
    complete = True
    for uid in uids:
        try:
            # Fetch flags and full message in one round-trip.
            _, msg_data = imap.fetch(uid, "(FLAGS RFC822)")
            # First element of the fetch response carries the FLAGS metadata.
            meta = msg_data[0][0] if msg_data and isinstance(msg_data[0], tuple) else b""
            raw = msg_data[0][1]
            msg = email.message_from_bytes(raw)
            message_id = msg.get("Message-ID", "").strip()
            from_addr = email.utils.parseaddr(msg.get("From", ""))[1]
            to_addrs_raw = msg.get("To", "")
            to_addrs = [a for _, a in email.utils.getaddresses([to_addrs_raw])]
            subject = _decode_header(msg.get("Subject", ""))
            date_str = msg.get("Date", "")
            try:
                occurred_at = email.utils.parsedate_to_datetime(date_str).isoformat()
            except Exception:
                # Unparseable Date header: fall back to "now".
                occurred_at = datetime.now(timezone.utc).isoformat()
            # Heuristic: the \Seen flag appears in the FLAGS metadata bytes.
            is_read = b"\\Seen" in (meta or b"")
            try:
                body, body_html = _get_body(msg)
            except Exception:
                body, body_html = "", ""
            try:
                file_attachments = _get_attachments(msg)
            except Exception:
                file_attachments = []
            results.append({
                "mail_account": account["key"],
                "message_id": message_id,
                "from_addr": from_addr,
                "to_addrs": to_addrs,
                "subject": subject,
                "body": body,
                "body_html": body_html,
                "attachments": file_attachments,
                "occurred_at": occurred_at,
                "is_read": bool(is_read),
            })
        except Exception as e:
            # A single bad message must not abort the whole sync; mark incomplete
            # so callers can skip destructive mirroring for this run.
            complete = False
            logger.warning(f"[EMAIL SYNC] Failed to parse message uid={uid} account={account['key']}: {e}")
    imap.logout()
    return results, complete
|
||||
|
||||
|
||||
def _sync_emails_sync() -> tuple[list[dict], bool]:
    """Fetch messages from every inbound-synced account (blocking).

    Returns (messages, all_complete); all_complete is False when any account
    reported an incomplete fetch.
    """
    collected: list[dict] = []
    overall_ok = True
    # Deduplicate by physical inbox source. Aliases often share the same mailbox.
    visited: set[tuple] = set()
    for acc in get_mail_accounts():
        if not acc.get("sync_inbound"):
            continue
        source = (
            (acc.get("imap_host") or "").lower(),
            int(acc.get("imap_port") or 0),
            (acc.get("imap_username") or "").lower(),
            (acc.get("imap_inbox") or "INBOX").upper(),
        )
        if source in visited:
            continue
        visited.add(source)
        msgs, complete = _sync_account_emails_sync(acc)
        collected.extend(msgs)
        overall_ok = overall_ok and complete
    return collected, overall_ok
|
||||
|
||||
|
||||
async def sync_emails() -> int:
    """
    Pull emails from IMAP, match against CRM customers, store new ones.

    Also mirrors server state back into the local DB: read/unread flags for
    already-stored inbound mail, and remote deletions (requiring two
    consecutive "missing" syncs before deleting locally, to avoid oscillation
    on transient IMAP inconsistency).

    Returns count of new entries created.
    """
    if not get_mail_accounts():
        return 0

    loop = asyncio.get_running_loop()
    try:
        messages, fetch_complete = await loop.run_in_executor(None, _sync_emails_sync)
    except Exception as e:
        logger.error(f"[EMAIL SYNC] IMAP connect/fetch failed: {e}")
        raise

    db = await mqtt_db.get_db()

    # Load all customer email contacts into a flat lookup: email -> customer_id
    addr_to_customer = _load_customer_email_map()

    # Load already-synced message-ids from DB
    rows = await db.execute_fetchall(
        "SELECT id, ext_message_id, COALESCE(mail_account, '') as mail_account, direction, is_read, customer_id "
        "FROM crm_comms_log WHERE type='email' AND ext_message_id IS NOT NULL"
    )
    known_map = {
        (r[1], r[2] or ""): {
            "id": r[0],
            "direction": r[3],
            "is_read": int(r[4] or 0),
            "customer_id": r[5],
        }
        for r in rows
    }

    new_count = 0
    now = datetime.now(timezone.utc).isoformat()
    server_ids_by_account: dict[str, set[str]] = {}
    # Global inbound IDs from server snapshot, used to avoid account-classification delete oscillation.
    inbound_server_ids: set[str] = set()
    accounts = get_mail_accounts()
    accounts_by_email = {a["email"].lower(): a for a in accounts}
    # Initialize tracked inbound accounts even if inbox is empty.
    for a in accounts:
        if a.get("sync_inbound"):
            server_ids_by_account[a["key"]] = set()

    for msg in messages:
        mid = msg["message_id"]
        fetch_account_key = (msg.get("mail_account") or "").strip().lower()
        from_addr = msg["from_addr"].lower()
        to_addrs = [a.lower() for a in msg["to_addrs"]]

        # Classify direction: a message sent FROM one of our accounts is
        # outbound; otherwise inbound, resolved to the addressed account.
        sender_acc = accounts_by_email.get(from_addr)
        if sender_acc:
            direction = "outbound"
            resolved_account_key = sender_acc["key"]
            customer_addrs = to_addrs
        else:
            direction = "inbound"
            target_acc = None
            for addr in to_addrs:
                if addr in accounts_by_email:
                    target_acc = accounts_by_email[addr]
                    break
            resolved_account_key = (target_acc["key"] if target_acc else fetch_account_key)
            customer_addrs = [from_addr]
            if target_acc and not target_acc.get("sync_inbound"):
                # Ignore inbound for non-synced aliases (e.g. info/news).
                continue

        if direction == "inbound" and mid and resolved_account_key in server_ids_by_account:
            server_ids_by_account[resolved_account_key].add(mid)
            inbound_server_ids.add(mid)
        # Find matching customer (may be None - we still store the email)
        customer_id = None
        for addr in customer_addrs:
            if addr in addr_to_customer:
                customer_id = addr_to_customer[addr]
                break

        if mid and (mid, resolved_account_key) in known_map:
            existing = known_map[(mid, resolved_account_key)]
            # Backfill customer linkage for rows created without customer_id.
            if customer_id and not existing.get("customer_id"):
                await db.execute(
                    "UPDATE crm_comms_log SET customer_id=? WHERE id=?",
                    (customer_id, existing["id"]),
                )
            # Existing inbound message: sync read/unread state from server.
            if direction == "inbound":
                server_read = 1 if msg.get("is_read") else 0
                await db.execute(
                    "UPDATE crm_comms_log SET is_read=? "
                    "WHERE type='email' AND direction='inbound' AND ext_message_id=? AND mail_account=?",
                    (server_read, mid, resolved_account_key),
                )
            continue  # already stored

        attachments_json = json.dumps(msg.get("attachments") or [])
        to_addrs_json = json.dumps(to_addrs)

        entry_id = str(uuid.uuid4())
        await db.execute(
            """INSERT INTO crm_comms_log
               (id, customer_id, type, mail_account, direction, subject, body, body_html, attachments,
                ext_message_id, from_addr, to_addrs, logged_by, occurred_at, created_at, is_read)
               VALUES (?, ?, 'email', ?, ?, ?, ?, ?, ?, ?, ?, ?, 'system', ?, ?, ?)""",
            (entry_id, customer_id, resolved_account_key, direction, msg["subject"], msg["body"],
             msg.get("body_html", ""), attachments_json,
             mid, from_addr, to_addrs_json, msg["occurred_at"], now, 1 if msg.get("is_read") else 0),
        )
        new_count += 1

    # Mirror remote deletes based on global inbound message-id snapshot.
    # To avoid transient IMAP inconsistency causing add/remove oscillation,
    # require two consecutive "missing" syncs before local deletion.
    sync_keys = [a["key"] for a in accounts if a.get("sync_inbound")]
    if sync_keys and fetch_complete:
        placeholders = ",".join("?" for _ in sync_keys)
        local_rows = await db.execute_fetchall(
            f"SELECT id, ext_message_id, mail_account FROM crm_comms_log "
            f"WHERE type='email' AND direction='inbound' AND mail_account IN ({placeholders}) "
            "AND ext_message_id IS NOT NULL",
            sync_keys,
        )
        to_delete: list[str] = []
        for row in local_rows:
            row_id, ext_id, acc_key = row[0], row[1], row[2]
            if not ext_id:
                continue
            state_key = f"missing_email::{acc_key}::{ext_id}"
            if ext_id in inbound_server_ids:
                # Message is back on the server: clear any pending missing-counter.
                await db.execute("DELETE FROM crm_sync_state WHERE key = ?", (state_key,))
                continue
            prev = await db.execute_fetchall("SELECT value FROM crm_sync_state WHERE key = ?", (state_key,))
            prev_count = int(prev[0][0]) if prev and (prev[0][0] or "").isdigit() else 0
            # BUG FIX: this counter previously reused the name `new_count`,
            # clobbering the new-email counter and corrupting both the commit
            # condition below and the function's return value.
            miss_count = prev_count + 1
            await db.execute(
                "INSERT INTO crm_sync_state (key, value) VALUES (?, ?) "
                "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
                (state_key, str(miss_count)),
            )
            if miss_count >= 2:
                to_delete.append(row_id)
                await db.execute("DELETE FROM crm_sync_state WHERE key = ?", (state_key,))
        if to_delete:
            del_ph = ",".join("?" for _ in to_delete)
            await db.execute(f"DELETE FROM crm_comms_log WHERE id IN ({del_ph})", to_delete)

    if new_count or server_ids_by_account:
        await db.commit()

    # Update last sync time
    await db.execute(
        "INSERT INTO crm_sync_state (key, value) VALUES ('last_email_sync', ?) "
        "ON CONFLICT(key) DO UPDATE SET value=excluded.value",
        (now,),
    )
    await db.commit()

    logger.info(f"[EMAIL SYNC] Done — {new_count} new emails stored")
    return new_count
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Lightweight new-mail check (synchronous — called via run_in_executor)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _check_server_count_sync() -> int:
    """Count all messages across configured inbound IMAP mailboxes.

    Kept for backward compatibility; no longer used by check_new_emails().
    Duplicate host/port/user/inbox combinations are counted once, so one
    physical mailbox shared by several accounts is not double-counted.

    Returns:
        Total number of messages found across the distinct mailboxes.

    Raises:
        Whatever imaplib raises on connect/login/select/search failures.
    """
    total = 0
    seen_sources: set[tuple] = set()
    for acc in get_mail_accounts():
        if not acc.get("sync_inbound"):
            continue
        # Deduplicate on the physical mailbox identity, not the account key.
        source = (
            (acc.get("imap_host") or "").lower(),
            int(acc.get("imap_port") or 0),
            (acc.get("imap_username") or "").lower(),
            (acc.get("imap_inbox") or "INBOX").upper(),
        )
        if source in seen_sources:
            continue
        seen_sources.add(source)
        if acc.get("imap_use_ssl"):
            imap = imaplib.IMAP4_SSL(acc["imap_host"], int(acc["imap_port"]))
        else:
            imap = imaplib.IMAP4(acc["imap_host"], int(acc["imap_port"]))
        try:
            imap.login(acc["imap_username"], acc["imap_password"])
            imap.select(acc.get("imap_inbox", "INBOX"), readonly=True)
            _, data = imap.search(None, "ALL")
            total += len(data[0].split()) if data[0] else 0
        finally:
            # BUG FIX: the connection previously leaked when login/select/search
            # raised before logout(); always release the socket.
            try:
                imap.logout()
            except Exception:
                pass
    return total
|
||||
|
||||
|
||||
async def check_new_emails() -> dict:
    """
    Compare server messages vs. locally stored comms-log entries.

    Returns {"new_count": int} — does NOT download or store anything.
    Outbound copies (messages whose sender is one of our accounts) and
    messages addressed to accounts with sync_inbound disabled are ignored.
    Re-raises IMAP failures after logging so callers can surface them.
    """
    if not get_mail_accounts():
        return {"new_count": 0}

    # We are inside a coroutine, so a running loop is guaranteed;
    # asyncio.get_event_loop() is deprecated in this context.
    loop = asyncio.get_running_loop()
    try:
        # Reuse same account-resolution logic as sync to avoid false positives.
        messages, _ = await loop.run_in_executor(None, _sync_emails_sync)
    except Exception as e:
        logger.warning(f"[EMAIL CHECK] IMAP check failed: {e}")
        raise

    accounts = get_mail_accounts()
    accounts_by_email = {a["email"].lower(): a for a in accounts}
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT ext_message_id, COALESCE(mail_account, '') as mail_account FROM crm_comms_log "
        "WHERE type='email' AND ext_message_id IS NOT NULL"
    )
    # (message-id, account-key) pairs already stored locally.
    known_ids = {(r[0], r[1] or "") for r in rows}

    new_count = 0
    for msg in messages:
        mid = (msg.get("message_id") or "").strip()
        if not mid:
            continue
        fetch_account_key = (msg.get("mail_account") or "").strip().lower()
        from_addr = (msg.get("from_addr") or "").lower()
        to_addrs = [(a or "").lower() for a in (msg.get("to_addrs") or [])]

        sender_acc = accounts_by_email.get(from_addr)
        if sender_acc:
            # Outbound copy in mailbox; not part of "new inbound mail" banner.
            continue

        # Resolve which of our accounts the message was addressed to; fall
        # back to the account the message was fetched from.
        target_acc = None
        for addr in to_addrs:
            if addr in accounts_by_email:
                target_acc = accounts_by_email[addr]
                break
        resolved_account_key = (target_acc["key"] if target_acc else fetch_account_key)
        if target_acc and not target_acc.get("sync_inbound"):
            continue
        if (mid, resolved_account_key) not in known_ids:
            new_count += 1

    return {"new_count": new_count}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SMTP send (synchronous — called via run_in_executor)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _append_to_sent_sync(account: dict, raw_message: bytes) -> None:
    """Best-effort append of a sent MIME message to the IMAP Sent folder.

    Tries the configured Sent folder first, then common fallback names.
    Never raises: failures are logged and swallowed so a successful SMTP
    send is not reported as failed just because the Sent copy failed.
    """
    if not raw_message:
        return
    try:
        if account.get("imap_use_ssl"):
            imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
        else:
            imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
        try:
            imap.login(account["imap_username"], account["imap_password"])

            preferred = str(account.get("imap_sent") or "Sent").strip() or "Sent"
            candidates = [preferred, "Sent", "INBOX.Sent", "Sent Items", "INBOX.Sent Items"]
            # De-duplicate case-insensitively while preserving priority order.
            seen = set()
            ordered_candidates = []
            for name in candidates:
                key = name.lower()
                if key not in seen:
                    seen.add(key)
                    ordered_candidates.append(name)

            appended = False
            for mailbox in ordered_candidates:
                try:
                    status, _ = imap.append(mailbox, "\\Seen", None, raw_message)
                    if status == "OK":
                        appended = True
                        break
                except Exception:
                    continue

            if not appended:
                logger.warning("[EMAIL SEND] Sent copy append failed for account=%s", account.get("key"))
        finally:
            # BUG FIX: the connection previously leaked when login or append
            # raised before logout(); always release the socket.
            imap.logout()
    except Exception as e:
        logger.warning("[EMAIL SEND] IMAP append to Sent failed for account=%s: %s", account.get("key"), e)
|
||||
|
||||
|
||||
def _send_email_sync(
    account: dict,
    to: str,
    subject: str,
    body: str,
    body_html: str,
    cc: List[str],
    file_attachments: Optional[List[Tuple[str, bytes, str]]] = None,
) -> str:
    """Send via SMTP. Returns the generated Message-ID header.

    file_attachments: list of (filename, content_bytes, mime_type)

    Inline data: URLs in *body_html* are rewritten to cid: references and
    attached as inline image parts. On success a copy of the raw message is
    appended to the account's IMAP Sent folder (best effort).
    """
    html_with_cids, inline_images = _extract_inline_data_images(body_html or "")

    # Build body tree:
    # - with inline images: related(alternative(text/plain, text/html), image parts)
    # - without inline images: alternative(text/plain, text/html)
    if inline_images:
        body_part = MIMEMultipart("related")
        alt_part = MIMEMultipart("alternative")
        alt_part.attach(MIMEText(body, "plain", "utf-8"))
        if html_with_cids:
            alt_part.attach(MIMEText(html_with_cids, "html", "utf-8"))
        body_part.attach(alt_part)

        for idx, (cid, content, mime_type) in enumerate(inline_images, start=1):
            maintype, _, subtype = mime_type.partition("/")
            img_part = MIMEBase(maintype or "image", subtype or "png")
            img_part.set_payload(content)
            encoders.encode_base64(img_part)
            img_part.add_header("Content-ID", f"<{cid}>")
            img_part.add_header("Content-Disposition", "inline", filename=f"inline-{idx}.{subtype or 'png'}")
            body_part.attach(img_part)
    else:
        body_part = MIMEMultipart("alternative")
        body_part.attach(MIMEText(body, "plain", "utf-8"))
        if body_html:
            body_part.attach(MIMEText(body_html, "html", "utf-8"))

    # Wrap with mixed only when classic file attachments exist.
    if file_attachments:
        msg = MIMEMultipart("mixed")
        msg.attach(body_part)
    else:
        msg = body_part

    from_addr = account["email"]
    msg["From"] = from_addr
    msg["To"] = to
    msg["Subject"] = subject
    if cc:
        msg["Cc"] = ", ".join(cc)

    # Self-generated Message-ID so the log entry can be matched to the
    # mailbox copy later.
    msg_id = f"<{uuid.uuid4()}@bellsystems>"
    msg["Message-ID"] = msg_id

    # Attach classic file attachments.
    for filename, content, mime_type in (file_attachments or []):
        maintype, _, subtype = mime_type.partition("/")
        part = MIMEBase(maintype or "application", subtype or "octet-stream")
        part.set_payload(content)
        encoders.encode_base64(part)
        part.add_header("Content-Disposition", "attachment", filename=filename)
        msg.attach(part)

    recipients = [to] + cc
    raw_for_append = msg.as_bytes()
    if account.get("smtp_use_tls"):
        server = smtplib.SMTP(account["smtp_host"], int(account["smtp_port"]))
    else:
        server = smtplib.SMTP_SSL(account["smtp_host"], int(account["smtp_port"]))
    try:
        if account.get("smtp_use_tls"):
            server.starttls()
        server.login(account["smtp_username"], account["smtp_password"])
        server.sendmail(from_addr, recipients, msg.as_string())
    finally:
        # BUG FIX: the SMTP connection previously leaked when starttls/login/
        # sendmail raised before quit(); always tear it down.
        try:
            server.quit()
        except Exception:
            pass
    # Only reached when the send succeeded (exceptions propagate above).
    _append_to_sent_sync(account, raw_for_append)

    return msg_id
|
||||
|
||||
|
||||
async def send_email(
    customer_id: str | None,
    from_account: str | None,
    to: str,
    subject: str,
    body: str,
    body_html: str,
    cc: List[str],
    sent_by: str,
    file_attachments: Optional[List[Tuple[str, bytes, str]]] = None,
) -> dict:
    """Send an email and record it in crm_comms_log. Returns the new log entry.

    file_attachments: list of (filename, content_bytes, mime_type)

    Raises:
        RuntimeError: when no mail account is configured, the selected sender
            account is invalid, not allowed to send, or missing SMTP config.
    """
    accounts = get_mail_accounts()
    if not accounts:
        raise RuntimeError("SMTP not configured")
    account = account_by_key(from_account) if from_account else None
    if not account:
        raise RuntimeError("Please select a valid sender account")
    if not account.get("allow_send"):
        raise RuntimeError("Selected account is not allowed to send")
    if not account.get("smtp_host") or not account.get("smtp_username") or not account.get("smtp_password"):
        raise RuntimeError("SMTP not configured for selected account")

    # If the caller did not provide a customer_id (e.g. compose from Mail page),
    # auto-link by matching recipient addresses against CRM customer emails.
    resolved_customer_id = customer_id
    if not resolved_customer_id:
        addr_to_customer = _load_customer_email_map()
        rcpts = [to, *cc]
        parsed_rcpts = [addr for _, addr in email.utils.getaddresses(rcpts) if addr]
        for addr in parsed_rcpts:
            key = (addr or "").strip().lower()
            if key in addr_to_customer:
                resolved_customer_id = addr_to_customer[key]
                break

    # We are inside a coroutine — get_running_loop() is the non-deprecated way.
    loop = asyncio.get_running_loop()
    msg_id = await loop.run_in_executor(
        None,
        lambda: _send_email_sync(account, to, subject, body, body_html, cc, file_attachments or []),
    )

    # Upload attachments to Nextcloud and register them in crm_media.
    comm_attachments = []
    if file_attachments and resolved_customer_id:
        from crm import nextcloud, service
        from crm.models import MediaCreate, MediaDirection
        from shared.firebase import get_db as get_firestore
        firestore_db = get_firestore()
        doc = firestore_db.collection("crm_customers").document(resolved_customer_id).get()
        if doc.exists:
            data = doc.to_dict()
            # Human-readable Nextcloud folder name, falling back to the id.
            folder_id = data.get("folder_id") or resolved_customer_id
            nc_path = folder_id

            for filename, content, mime_type in file_attachments:
                # images/video → sent_media, everything else → documents
                if mime_type.startswith("image/") or mime_type.startswith("video/"):
                    subfolder = "sent_media"
                else:
                    subfolder = "documents"
                target_folder = f"customers/{nc_path}/{subfolder}"
                # BUG FIX: store each attachment under its own filename —
                # previously every attachment was written to one literal path,
                # so multiple attachments overwrote each other.
                file_path = f"{target_folder}/{filename}"
                try:
                    await nextcloud.ensure_folder(target_folder)
                    await nextcloud.upload_file(file_path, content, mime_type)
                    await service.create_media(MediaCreate(
                        customer_id=resolved_customer_id,
                        filename=filename,
                        nextcloud_path=file_path,
                        mime_type=mime_type,
                        direction=MediaDirection.sent,
                        tags=["email-attachment"],
                        uploaded_by=sent_by,
                    ))
                    comm_attachments.append({"filename": filename, "nextcloud_path": file_path})
                except Exception as e:
                    # Best effort: a failed upload must not fail the send.
                    logger.warning(f"[EMAIL SEND] Failed to upload attachment {filename}: {e}")

    now = datetime.now(timezone.utc).isoformat()
    entry_id = str(uuid.uuid4())
    db = await mqtt_db.get_db()
    our_addr = account["email"].lower()
    to_addrs_json = json.dumps([to] + cc)
    attachments_json = json.dumps(comm_attachments)
    await db.execute(
        """INSERT INTO crm_comms_log
           (id, customer_id, type, mail_account, direction, subject, body, body_html, attachments,
            ext_message_id, from_addr, to_addrs, logged_by, occurred_at, created_at)
           VALUES (?, ?, 'email', ?, 'outbound', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (entry_id, resolved_customer_id, account["key"], subject, body, body_html, attachments_json, msg_id,
         our_addr, to_addrs_json, sent_by, now, now),
    )
    await db.commit()

    return {
        "id": entry_id,
        "customer_id": resolved_customer_id,
        "type": "email",
        "mail_account": account["key"],
        "direction": "outbound",
        "subject": subject,
        "body": body,
        "body_html": body_html,
        "attachments": comm_attachments,
        "ext_message_id": msg_id,
        "from_addr": our_addr,
        "to_addrs": [to] + cc,
        "logged_by": sent_by,
        "occurred_at": now,
        "created_at": now,
    }
|
||||
|
||||
|
||||
def _delete_remote_email_sync(account: dict, ext_message_id: str) -> bool:
    """Delete the message with the given Message-ID from the account's inbox.

    Returns True when at least one matching message was flagged and expunged,
    False when the id is empty or no match was found.
    """
    if not ext_message_id:
        return False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
    try:
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        # NOTE(review): imap.search returns message sequence numbers, not
        # UIDs, despite the variable name — valid only within this session.
        uids = data[0].split() if data and data[0] else []
        if not uids:
            return False
        for uid in uids:
            imap.store(uid, "+FLAGS", "\\Deleted")
        imap.expunge()
        return True
    finally:
        # BUG FIX: the connection previously leaked when login/search/store
        # raised before logout(); always release the socket.
        try:
            imap.logout()
        except Exception:
            pass
|
||||
|
||||
|
||||
async def delete_remote_email(ext_message_id: str, mail_account: str | None, from_addr: str | None = None) -> bool:
    """Delete a message on the IMAP server for the resolved account.

    The account is resolved by key first, then by sender address. Returns
    False (never raises) when no account/IMAP host can be resolved or the
    remote delete fails.
    """
    account = account_by_key(mail_account) if mail_account else None
    if not account:
        account = account_by_email(from_addr)
    if not account or not account.get("imap_host"):
        return False
    # Inside a coroutine, get_running_loop() replaces the deprecated
    # get_event_loop().
    loop = asyncio.get_running_loop()
    try:
        return await loop.run_in_executor(None, lambda: _delete_remote_email_sync(account, ext_message_id))
    except Exception as e:
        logger.warning(f"[EMAIL DELETE] Failed remote delete for {ext_message_id}: {e}")
        return False
|
||||
|
||||
|
||||
def _set_remote_read_sync(account: dict, ext_message_id: str, read: bool) -> bool:
    """Set or clear the \\Seen flag on the message with the given Message-ID.

    Returns True when at least one matching message was updated, False when
    the id is empty or no match was found.
    """
    if not ext_message_id:
        return False
    if account.get("imap_use_ssl"):
        imap = imaplib.IMAP4_SSL(account["imap_host"], int(account["imap_port"]))
    else:
        imap = imaplib.IMAP4(account["imap_host"], int(account["imap_port"]))
    try:
        imap.login(account["imap_username"], account["imap_password"])
        imap.select(account.get("imap_inbox", "INBOX"))
        _, data = imap.search(None, f'HEADER Message-ID "{ext_message_id}"')
        # NOTE(review): these are sequence numbers, not UIDs, despite the name.
        uids = data[0].split() if data and data[0] else []
        if not uids:
            return False
        flag_op = "+FLAGS" if read else "-FLAGS"
        for uid in uids:
            imap.store(uid, flag_op, "\\Seen")
        return True
    finally:
        # BUG FIX: the connection previously leaked when login/search/store
        # raised before logout(); always release the socket.
        try:
            imap.logout()
        except Exception:
            pass
|
||||
|
||||
|
||||
async def set_remote_read(ext_message_id: str, mail_account: str | None, from_addr: str | None, read: bool) -> bool:
    """Mark a message read/unread on the IMAP server for the resolved account.

    The account is resolved by key first, then by sender address. Returns
    False (never raises) when no account/IMAP host can be resolved or the
    remote update fails.
    """
    account = account_by_key(mail_account) if mail_account else None
    if not account:
        account = account_by_email(from_addr)
    if not account or not account.get("imap_host"):
        return False
    # Inside a coroutine, get_running_loop() replaces the deprecated
    # get_event_loop().
    loop = asyncio.get_running_loop()
    try:
        return await loop.run_in_executor(None, lambda: _set_remote_read_sync(account, ext_message_id, read))
    except Exception as e:
        logger.warning(f"[EMAIL READ] Failed remote read update for {ext_message_id}: {e}")
        return False
|
||||
|
||||
|
||||
|
||||
104
backend/crm/mail_accounts.py
Normal file
104
backend/crm/mail_accounts.py
Normal file
@@ -0,0 +1,104 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from config import settings
|
||||
|
||||
|
||||
def _bool(v: Any, default: bool) -> bool:
|
||||
if isinstance(v, bool):
|
||||
return v
|
||||
if isinstance(v, str):
|
||||
return v.strip().lower() in {"1", "true", "yes", "on"}
|
||||
if v is None:
|
||||
return default
|
||||
return bool(v)
|
||||
|
||||
|
||||
def get_mail_accounts() -> list[dict]:
    """
    Return normalized mail-account dictionaries.

    Accounts come from ``settings.mail_accounts``; malformed entries and
    entries without both a key and an email address are skipped. When nothing
    usable is configured, fall back to the legacy single-account IMAP/SMTP
    settings under the key "default".
    """

    def _normalize(raw: Any) -> dict | None:
        # Skip anything that is not a mapping or lacks the two required fields.
        if not isinstance(raw, dict):
            return None
        key = str(raw.get("key") or "").strip().lower()
        email = str(raw.get("email") or "").strip().lower()
        if not key or not email:
            return None
        return {
            "key": key,
            "label": str(raw.get("label") or key.title()),
            "email": email,
            # Per-account values win; global settings fill the gaps.
            "imap_host": raw.get("imap_host") or settings.imap_host,
            "imap_port": int(raw.get("imap_port") or settings.imap_port or 993),
            "imap_username": raw.get("imap_username") or email,
            "imap_password": raw.get("imap_password") or settings.imap_password,
            "imap_use_ssl": _bool(raw.get("imap_use_ssl"), settings.imap_use_ssl),
            "imap_inbox": str(raw.get("imap_inbox") or "INBOX"),
            "imap_sent": str(raw.get("imap_sent") or "Sent"),
            "smtp_host": raw.get("smtp_host") or settings.smtp_host,
            "smtp_port": int(raw.get("smtp_port") or settings.smtp_port or 587),
            "smtp_username": raw.get("smtp_username") or email,
            "smtp_password": raw.get("smtp_password") or settings.smtp_password,
            "smtp_use_tls": _bool(raw.get("smtp_use_tls"), settings.smtp_use_tls),
            "sync_inbound": _bool(raw.get("sync_inbound"), True),
            "allow_send": _bool(raw.get("allow_send"), True),
        }

    normalized = [acc for acc in map(_normalize, settings.mail_accounts) if acc]
    if normalized:
        return normalized

    # Legacy single-account fallback.
    if settings.imap_host or settings.smtp_host:
        legacy_email = (settings.smtp_username or settings.imap_username or "").strip().lower()
        if legacy_email:
            return [
                {
                    "key": "default",
                    "label": "Default",
                    "email": legacy_email,
                    "imap_host": settings.imap_host,
                    "imap_port": settings.imap_port,
                    "imap_username": settings.imap_username,
                    "imap_password": settings.imap_password,
                    "imap_use_ssl": settings.imap_use_ssl,
                    "imap_inbox": "INBOX",
                    "imap_sent": "Sent",
                    "smtp_host": settings.smtp_host,
                    "smtp_port": settings.smtp_port,
                    "smtp_username": settings.smtp_username,
                    "smtp_password": settings.smtp_password,
                    "smtp_use_tls": settings.smtp_use_tls,
                    "sync_inbound": True,
                    "allow_send": True,
                }
            ]

    return []
|
||||
|
||||
|
||||
def account_by_key(key: str | None) -> dict | None:
    """Return the configured mail account with the given (case-insensitive) key, or None."""
    wanted = (key or "").strip().lower()
    if not wanted:
        return None
    return next((acc for acc in get_mail_accounts() if acc["key"] == wanted), None)
|
||||
|
||||
|
||||
def account_by_email(email_addr: str | None) -> dict | None:
    """Return the configured mail account matching the given address (case-insensitive), or None."""
    wanted = (email_addr or "").strip().lower()
    if not wanted:
        return None
    return next((acc for acc in get_mail_accounts() if acc["email"] == wanted), None)
|
||||
35
backend/crm/media_router.py
Normal file
35
backend/crm/media_router.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from typing import Optional
|
||||
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from crm.models import MediaCreate, MediaInDB, MediaListResponse
|
||||
from crm import service
|
||||
|
||||
router = APIRouter(prefix="/api/crm/media", tags=["crm-media"])
|
||||
|
||||
|
||||
@router.get("", response_model=MediaListResponse)
async def list_media(
    customer_id: Optional[str] = Query(None),
    order_id: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List media records, optionally filtered by customer and/or order id.

    Requires the crm:view permission; returns items plus a total count.
    """
    items = await service.list_media(customer_id=customer_id, order_id=order_id)
    return MediaListResponse(items=items, total=len(items))
|
||||
|
||||
|
||||
@router.post("", response_model=MediaInDB, status_code=201)
async def create_media(
    body: MediaCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Register a media record. Requires the crm:edit permission."""
    return await service.create_media(body)
|
||||
|
||||
|
||||
@router.delete("/{media_id}", status_code=204)
async def delete_media(
    media_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a media record by id. Requires the crm:edit permission."""
    await service.delete_media(media_id)
|
||||
353
backend/crm/models.py
Normal file
353
backend/crm/models.py
Normal file
@@ -0,0 +1,353 @@
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class ProductCategory(str, Enum):
    """Closed set of product/service types in the CRM catalog."""
    controller = "controller"
    striker = "striker"
    clock = "clock"
    part = "part"
    repair_service = "repair_service"
|
||||
|
||||
|
||||
class CostLineItem(BaseModel):
    """One named cost component of a product (quantity x price)."""
    name: str
    quantity: float = 1
    price: float = 0.0
|
||||
|
||||
|
||||
class ProductCosts(BaseModel):
    """Cost breakdown of a product: labor plus itemized parts, with an optional total."""
    labor_hours: Optional[float] = None
    labor_rate: Optional[float] = None
    items: List[CostLineItem] = []
    total: Optional[float] = None
|
||||
|
||||
|
||||
class ProductStock(BaseModel):
    """Inventory counters for a product."""
    on_hand: int = 0
    reserved: int = 0
    available: int = 0
|
||||
|
||||
|
||||
class ProductCreate(BaseModel):
    """Payload to create a catalog product."""
    name: str
    sku: Optional[str] = None
    category: ProductCategory
    description: Optional[str] = None
    price: float
    currency: str = "EUR"
    costs: Optional[ProductCosts] = None
    stock: Optional[ProductStock] = None
    active: bool = True
    status: str = "active"  # active | discontinued | planned
    photo_url: Optional[str] = None
|
||||
|
||||
|
||||
class ProductUpdate(BaseModel):
    """Partial update for a product; None fields are left unchanged."""
    name: Optional[str] = None
    sku: Optional[str] = None
    category: Optional[ProductCategory] = None
    description: Optional[str] = None
    price: Optional[float] = None
    currency: Optional[str] = None
    costs: Optional[ProductCosts] = None
    stock: Optional[ProductStock] = None
    active: Optional[bool] = None
    status: Optional[str] = None
    photo_url: Optional[str] = None
|
||||
|
||||
|
||||
class ProductInDB(ProductCreate):
    """Stored product: creation payload plus id and timestamps."""
    id: str
    created_at: str
    updated_at: str
|
||||
|
||||
|
||||
class ProductListResponse(BaseModel):
    """List envelope for products."""
    products: List[ProductInDB]
    total: int
|
||||
|
||||
|
||||
# ── Customers ────────────────────────────────────────────────────────────────
|
||||
|
||||
class ContactType(str, Enum):
    """Kind of contact channel stored for a customer."""
    email = "email"
    phone = "phone"
    whatsapp = "whatsapp"
    other = "other"
|
||||
|
||||
|
||||
class CustomerContact(BaseModel):
    """One labelled contact entry (e.g. a phone number or email address)."""
    type: ContactType
    label: str
    value: str
    primary: bool = False  # the preferred entry of its type
|
||||
|
||||
|
||||
class CustomerNote(BaseModel):
    """Free-text note on a customer, with author and timestamp."""
    text: str
    by: str
    at: str
|
||||
|
||||
|
||||
class OwnedItemType(str, Enum):
    """Discriminator for items a customer owns."""
    console_device = "console_device"
    product = "product"
    freetext = "freetext"
|
||||
|
||||
|
||||
class OwnedItem(BaseModel):
    """Item owned by a customer; which fields apply depends on *type*."""
    type: OwnedItemType
    # console_device fields
    device_id: Optional[str] = None
    label: Optional[str] = None
    # product fields
    product_id: Optional[str] = None
    product_name: Optional[str] = None
    quantity: Optional[int] = None
    serial_numbers: Optional[List[str]] = None
    # freetext fields
    description: Optional[str] = None
    serial_number: Optional[str] = None
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class CustomerLocation(BaseModel):
    """Coarse customer location (all fields optional)."""
    city: Optional[str] = None
    country: Optional[str] = None
    region: Optional[str] = None
|
||||
|
||||
|
||||
class CustomerCreate(BaseModel):
    """Payload to create a CRM customer."""
    title: Optional[str] = None
    name: str
    surname: Optional[str] = None
    organization: Optional[str] = None
    contacts: List[CustomerContact] = []
    notes: List[CustomerNote] = []
    location: Optional[CustomerLocation] = None
    language: str = "el"  # preferred communication language (ISO code)
    tags: List[str] = []
    owned_items: List[OwnedItem] = []
    linked_user_ids: List[str] = []
    nextcloud_folder: Optional[str] = None
    folder_id: Optional[str] = None  # Human-readable Nextcloud folder name, e.g. "saint-john-corfu"
|
||||
|
||||
|
||||
class CustomerUpdate(BaseModel):
    """Partial update for a customer; None fields are left unchanged."""
    title: Optional[str] = None
    name: Optional[str] = None
    surname: Optional[str] = None
    organization: Optional[str] = None
    contacts: Optional[List[CustomerContact]] = None
    notes: Optional[List[CustomerNote]] = None
    location: Optional[CustomerLocation] = None
    language: Optional[str] = None
    tags: Optional[List[str]] = None
    owned_items: Optional[List[OwnedItem]] = None
    linked_user_ids: Optional[List[str]] = None
    nextcloud_folder: Optional[str] = None
    # folder_id intentionally excluded from update — set once at creation
|
||||
|
||||
|
||||
class CustomerInDB(CustomerCreate):
    """Stored customer: creation payload plus id and timestamps."""
    id: str
    created_at: str
    updated_at: str
|
||||
|
||||
|
||||
class CustomerListResponse(BaseModel):
    """List envelope for customers."""
    customers: List[CustomerInDB]
    total: int
|
||||
|
||||
|
||||
# ── Orders ───────────────────────────────────────────────────────────────────
|
||||
|
||||
class OrderStatus(str, Enum):
    """Lifecycle states of an order."""
    draft = "draft"
    confirmed = "confirmed"
    in_production = "in_production"
    shipped = "shipped"
    delivered = "delivered"
    cancelled = "cancelled"
|
||||
|
||||
|
||||
class PaymentStatus(str, Enum):
    """Payment progress of an order."""
    pending = "pending"
    partial = "partial"
    paid = "paid"
|
||||
|
||||
|
||||
class OrderDiscount(BaseModel):
    """Discount applied to an order."""
    type: str  # "percentage" | "fixed"
    value: float = 0
    reason: Optional[str] = None
|
||||
|
||||
|
||||
class OrderShipping(BaseModel):
    """Shipping details and tracking for an order."""
    method: Optional[str] = None
    tracking_number: Optional[str] = None
    carrier: Optional[str] = None
    shipped_at: Optional[str] = None
    delivered_at: Optional[str] = None
    destination: Optional[str] = None
|
||||
|
||||
|
||||
class OrderItem(BaseModel):
    """One line item of an order."""
    type: str  # console_device | product | freetext
    product_id: Optional[str] = None
    product_name: Optional[str] = None
    description: Optional[str] = None
    quantity: int = 1
    unit_price: float = 0.0
    serial_numbers: List[str] = []
|
||||
|
||||
|
||||
class OrderCreate(BaseModel):
    """Payload to create an order for a customer."""
    customer_id: str
    order_number: Optional[str] = None
    status: OrderStatus = OrderStatus.draft
    items: List[OrderItem] = []
    subtotal: float = 0
    discount: Optional[OrderDiscount] = None
    total_price: float = 0
    currency: str = "EUR"
    shipping: Optional[OrderShipping] = None
    payment_status: PaymentStatus = PaymentStatus.pending
    invoice_path: Optional[str] = None
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class OrderUpdate(BaseModel):
    """Partial update for an order; None fields are left unchanged."""
    customer_id: Optional[str] = None
    order_number: Optional[str] = None
    status: Optional[OrderStatus] = None
    items: Optional[List[OrderItem]] = None
    subtotal: Optional[float] = None
    discount: Optional[OrderDiscount] = None
    total_price: Optional[float] = None
    currency: Optional[str] = None
    shipping: Optional[OrderShipping] = None
    payment_status: Optional[PaymentStatus] = None
    invoice_path: Optional[str] = None
    notes: Optional[str] = None
|
||||
|
||||
|
||||
class OrderInDB(OrderCreate):
    """Stored order: creation payload plus id and timestamps."""
    id: str
    created_at: str
    updated_at: str
|
||||
|
||||
|
||||
class OrderListResponse(BaseModel):
    """List envelope for orders."""
    orders: List[OrderInDB]
    total: int
|
||||
|
||||
|
||||
# ── Comms Log ─────────────────────────────────────────────────────────────────
|
||||
|
||||
class CommType(str, Enum):
    """Channel a communication-log entry came through."""
    email = "email"
    whatsapp = "whatsapp"
    call = "call"
    sms = "sms"
    note = "note"
    in_person = "in_person"
|
||||
|
||||
|
||||
class CommDirection(str, Enum):
    """Direction of a communication-log entry."""
    inbound = "inbound"
    outbound = "outbound"
    internal = "internal"
|
||||
|
||||
|
||||
class CommAttachment(BaseModel):
    """Attachment metadata on a communication-log entry."""
    filename: str
    nextcloud_path: Optional[str] = None
    content_type: Optional[str] = None
    size: Optional[int] = None
|
||||
|
||||
|
||||
class CommCreate(BaseModel):
    """Payload to log a communication entry."""
    customer_id: Optional[str] = None
    type: CommType
    mail_account: Optional[str] = None  # key of the configured mail account, for emails
    direction: CommDirection
    subject: Optional[str] = None
    body: Optional[str] = None
    body_html: Optional[str] = None
    attachments: List[CommAttachment] = []
    ext_message_id: Optional[str] = None  # external id, e.g. the email Message-ID header
    from_addr: Optional[str] = None
    to_addrs: Optional[List[str]] = None
    logged_by: Optional[str] = None
    occurred_at: Optional[str] = None  # defaults to now if not provided
|
||||
|
||||
|
||||
class CommUpdate(BaseModel):
    """Editable fields of an existing communication-log entry."""
    subject: Optional[str] = None
    body: Optional[str] = None
    occurred_at: Optional[str] = None
|
||||
|
||||
|
||||
class CommInDB(BaseModel):
    """Stored communication-log entry as returned by the API."""
    id: str
    customer_id: Optional[str] = None
    type: CommType
    mail_account: Optional[str] = None  # key of the configured mail account, for emails
    direction: CommDirection
    subject: Optional[str] = None
    body: Optional[str] = None
    body_html: Optional[str] = None
    attachments: List[CommAttachment] = []
    ext_message_id: Optional[str] = None  # external id, e.g. the email Message-ID header
    from_addr: Optional[str] = None
    to_addrs: Optional[List[str]] = None
    logged_by: Optional[str] = None
    occurred_at: str
    created_at: str
    is_important: bool = False
    is_read: bool = False
|
||||
|
||||
|
||||
class CommListResponse(BaseModel):
    """List envelope for communication-log entries."""
    entries: List[CommInDB]
    total: int
|
||||
|
||||
|
||||
# ── Media ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
class MediaDirection(str, Enum):
    """Whether a media file was received from, sent to, or internal to the business."""
    received = "received"
    sent = "sent"
    internal = "internal"
|
||||
|
||||
|
||||
class MediaCreate(BaseModel):
    """Payload to register a media file stored in Nextcloud."""
    customer_id: Optional[str] = None
    order_id: Optional[str] = None
    filename: str
    nextcloud_path: str
    mime_type: Optional[str] = None
    direction: Optional[MediaDirection] = None
    tags: List[str] = []
    uploaded_by: Optional[str] = None
|
||||
|
||||
|
||||
class MediaInDB(BaseModel):
    """Stored media record as returned by the API."""
    id: str
    customer_id: Optional[str] = None
    order_id: Optional[str] = None
    filename: str
    nextcloud_path: str
    mime_type: Optional[str] = None
    direction: Optional[MediaDirection] = None
    tags: List[str] = []
    uploaded_by: Optional[str] = None
    created_at: str
|
||||
|
||||
|
||||
class MediaListResponse(BaseModel):
    """List envelope for media records."""
    items: List[MediaInDB]
    total: int
|
||||
314
backend/crm/nextcloud.py
Normal file
314
backend/crm/nextcloud.py
Normal file
@@ -0,0 +1,314 @@
|
||||
"""
|
||||
Nextcloud WebDAV client.
|
||||
|
||||
All paths passed to these functions are relative to `settings.nextcloud_base_path`.
|
||||
The full WebDAV URL is:
|
||||
{nextcloud_url}/remote.php/dav/files/{username}/{base_path}/{relative_path}
|
||||
"""
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import List
|
||||
from urllib.parse import unquote
|
||||
|
||||
import httpx
|
||||
from fastapi import HTTPException
|
||||
|
||||
from config import settings
|
||||
|
||||
DAV_NS = "DAV:"
|
||||
|
||||
# Default timeout for all Nextcloud WebDAV requests (seconds)
|
||||
_TIMEOUT = 60.0
|
||||
|
||||
# Shared async client — reuses TCP connections across requests so Nextcloud
|
||||
# doesn't see rapid connection bursts that trigger brute-force throttling.
|
||||
_http_client: httpx.AsyncClient | None = None
|
||||
|
||||
|
||||
def _get_client() -> httpx.AsyncClient:
    """Return the shared AsyncClient, recreating it if absent or closed."""
    global _http_client
    needs_new = _http_client is None or _http_client.is_closed
    if needs_new:
        # Reusing one client keeps TCP connections warm across requests.
        _http_client = httpx.AsyncClient(
            follow_redirects=True,
            timeout=_TIMEOUT,
            headers={"User-Agent": "BellSystems-CP/1.0"},
        )
    return _http_client
|
||||
|
||||
|
||||
async def close_client() -> None:
    """Close the shared HTTP client. Call this on application shutdown."""
    global _http_client
    if _http_client is not None and not _http_client.is_closed:
        await _http_client.aclose()
        _http_client = None
|
||||
|
||||
|
||||
async def keepalive_ping() -> None:
    """
    Send a lightweight PROPFIND Depth:0 to the Nextcloud base folder to keep
    the TCP connection alive. Safe to call even if Nextcloud is not configured.

    Failures are logged and swallowed on purpose — a missed keepalive must
    never take down the caller (presumably a periodic background task).
    """
    if not settings.nextcloud_url:
        # Nextcloud not configured — nothing to ping.
        return
    try:
        url = _base_url()
        client = _get_client()
        # Depth:0 means "properties of this resource only" — the cheapest
        # authenticated request WebDAV offers.
        await client.request(
            "PROPFIND",
            url,
            auth=_auth(),
            headers={"Depth": "0", "Content-Type": "application/xml"},
            content=_PROPFIND_BODY,
        )
    except Exception as e:
        # Best-effort: log and continue.
        print(f"[NEXTCLOUD KEEPALIVE] ping failed: {e}")
|
||||
|
||||
|
||||
def _dav_user() -> str:
    """The username used in the WebDAV URL path (may differ from the login username)."""
    override = settings.nextcloud_dav_user
    return override if override else settings.nextcloud_username
|
||||
|
||||
|
||||
def _base_url() -> str:
    """
    Absolute WebDAV URL of the configured base folder.

    Raises HTTPException(503) when Nextcloud is not configured so callers
    surface a clean "service unavailable" instead of a malformed URL.
    """
    if not settings.nextcloud_url:
        raise HTTPException(status_code=503, detail="Nextcloud not configured")
    return (
        f"{settings.nextcloud_url.rstrip('/')}"
        f"/remote.php/dav/files/{_dav_user()}"
        f"/{settings.nextcloud_base_path}"
    )
|
||||
|
||||
|
||||
def _auth() -> tuple[str, str]:
    """Basic-auth credential pair for httpx's `auth=` parameter."""
    return (settings.nextcloud_username, settings.nextcloud_password)
|
||||
|
||||
|
||||
def _full_url(relative_path: str) -> str:
    """Build the absolute WebDAV URL for a path relative to the base folder."""
    root = _base_url()
    trimmed = relative_path.strip("/")
    if not trimmed:
        return root
    return f"{root}/{trimmed}"
|
||||
|
||||
|
||||
def _parse_propfind(xml_bytes: bytes, base_path_prefix: str) -> List[dict]:
    """
    Parse a WebDAV PROPFIND (multistatus) XML response body.

    Returns a list of dicts with keys: filename, path (relative to
    nextcloud_base_path), mime_type, size, last_modified, is_dir.
    The requested folder itself (the PROPFIND root) is skipped.
    """
    root = ET.fromstring(xml_bytes)
    results = []

    # The prefix we need to strip from D:href to get the relative path back.
    # href looks like: /remote.php/dav/files/user/BellSystems/Console/customers/abc/
    dav_prefix = (
        f"/remote.php/dav/files/{_dav_user()}"
        f"/{settings.nextcloud_base_path}/"
    )

    for response in root.findall(f"{{{DAV_NS}}}response"):
        href_el = response.find(f"{{{DAV_NS}}}href")
        if href_el is None:
            continue
        # Servers URL-encode hrefs; decode so paths are human-readable.
        href = unquote(href_el.text or "")

        # Strip DAV prefix to get relative path within base_path.
        if href.startswith(dav_prefix):
            rel = href[len(dav_prefix):].rstrip("/")
        else:
            rel = href

        # Skip the folder itself (the root of the PROPFIND request).
        if rel == base_path_prefix.strip("/"):
            continue

        propstat = response.find(f"{{{DAV_NS}}}propstat")
        if propstat is None:
            continue
        prop = propstat.find(f"{{{DAV_NS}}}prop")
        if prop is None:
            continue

        # is_dir: resourcetype contains D:collection
        resource_type = prop.find(f"{{{DAV_NS}}}resourcetype")
        is_dir = resource_type is not None and resource_type.find(f"{{{DAV_NS}}}collection") is not None

        # Fall back to sensible defaults when the server omits a property.
        content_type_el = prop.find(f"{{{DAV_NS}}}getcontenttype")
        mime_type = content_type_el.text if content_type_el is not None else (
            "inode/directory" if is_dir else "application/octet-stream"
        )

        size_el = prop.find(f"{{{DAV_NS}}}getcontentlength")
        size = int(size_el.text) if size_el is not None and size_el.text else 0

        modified_el = prop.find(f"{{{DAV_NS}}}getlastmodified")
        last_modified = modified_el.text if modified_el is not None else None

        filename = rel.split("/")[-1] if rel else ""

        results.append({
            "filename": filename,
            "path": rel,
            "mime_type": mime_type,
            "size": size,
            "last_modified": last_modified,
            "is_dir": is_dir,
        })

    return results
|
||||
|
||||
|
||||
async def ensure_folder(relative_path: str) -> None:
    """
    Create a folder (and all parents) in Nextcloud via MKCOL.
    Includes the base_path segments so the full hierarchy is created from scratch.
    Silently succeeds if folders already exist.

    Raises HTTPException(502) when any MKCOL fails with an unexpected status.
    """
    # Build the complete path list: base_path segments + relative_path segments.
    base_parts = settings.nextcloud_base_path.strip("/").split("/")
    rel_parts = relative_path.strip("/").split("/") if relative_path.strip("/") else []
    all_parts = base_parts + rel_parts

    dav_root = f"{settings.nextcloud_url.rstrip('/')}/remote.php/dav/files/{_dav_user()}"
    client = _get_client()
    built = ""
    # MKCOL only creates one level at a time, so walk the path top-down.
    for part in all_parts:
        built = f"{built}/{part}" if built else part
        url = f"{dav_root}/{built}"
        resp = await client.request("MKCOL", url, auth=_auth())
        # 201 = created, 405/409 = already exists — both are fine
        if resp.status_code not in (201, 405, 409):
            raise HTTPException(
                status_code=502,
                detail=f"Failed to create Nextcloud folder '{built}': {resp.status_code}",
            )
|
||||
|
||||
|
||||
async def write_info_file(customer_folder: str, customer_name: str, customer_id: str) -> None:
    """Write a _info.txt stub into a new customer folder for human browsability."""
    lines = [f"Customer: {customer_name}", f"ID: {customer_id}", ""]
    body = "\n".join(lines).encode("utf-8")
    await upload_file(f"{customer_folder}/_info.txt", body, "text/plain")
|
||||
|
||||
|
||||
_PROPFIND_BODY = b"""<?xml version="1.0"?>
|
||||
<D:propfind xmlns:D="DAV:">
|
||||
<D:prop>
|
||||
<D:resourcetype/>
|
||||
<D:getcontenttype/>
|
||||
<D:getcontentlength/>
|
||||
<D:getlastmodified/>
|
||||
</D:prop>
|
||||
</D:propfind>"""
|
||||
|
||||
|
||||
async def list_folder(relative_path: str) -> List[dict]:
    """
    PROPFIND at depth=1 to list a folder's immediate children.
    relative_path is relative to nextcloud_base_path.

    Returns [] when the folder does not exist (404); raises
    HTTPException(502) on any other unexpected status.
    """
    url = _full_url(relative_path)
    client = _get_client()
    resp = await client.request(
        "PROPFIND",
        url,
        auth=_auth(),
        headers={"Depth": "1", "Content-Type": "application/xml"},
        content=_PROPFIND_BODY,
    )
    if resp.status_code == 404:
        return []
    # 207 Multi-Status is the normal WebDAV success code; accept 200 too.
    if resp.status_code not in (207, 200):
        raise HTTPException(status_code=502, detail=f"Nextcloud PROPFIND failed: {resp.status_code}")
    return _parse_propfind(resp.content, relative_path)
|
||||
|
||||
|
||||
async def list_folder_recursive(relative_path: str) -> List[dict]:
    """
    Recursively list ALL files under a folder (any depth).
    Tries Depth:infinity first (single call). Falls back to manual recursion
    via Depth:1 if the server returns 403/400 (some servers disable infinity).
    Returns only file entries (is_dir=False).
    """
    url = _full_url(relative_path)
    client = _get_client()
    resp = await client.request(
        "PROPFIND",
        url,
        auth=_auth(),
        headers={"Depth": "infinity", "Content-Type": "application/xml"},
        content=_PROPFIND_BODY,
    )

    if resp.status_code in (207, 200):
        all_items = _parse_propfind(resp.content, relative_path)
        # Folders are only traversal artifacts here — callers want files.
        return [item for item in all_items if not item["is_dir"]]

    # Depth:infinity not supported — fall back to recursive Depth:1
    if resp.status_code in (403, 400, 412):
        return await _list_recursive_fallback(relative_path)

    if resp.status_code == 404:
        return []

    raise HTTPException(status_code=502, detail=f"Nextcloud PROPFIND failed: {resp.status_code}")
|
||||
|
||||
|
||||
async def _list_recursive_fallback(relative_path: str) -> List[dict]:
    """Manually recurse via Depth:1 calls when Depth:infinity is blocked."""
    entries = await list_folder(relative_path)
    collected: List[dict] = []
    subdirs: List[str] = []
    # Split this level into files (kept) and directories (recursed into).
    for entry in entries:
        if entry["is_dir"]:
            subdirs.append(entry["path"])
        else:
            collected.append(entry)
    for sub in subdirs:
        collected.extend(await _list_recursive_fallback(sub))
    return collected
|
||||
|
||||
|
||||
async def upload_file(relative_path: str, content: bytes, mime_type: str) -> str:
    """
    PUT a file to Nextcloud. Returns the relative_path on success.
    relative_path includes filename, e.g. "customers/abc123/media/photo.jpg"

    Raises HTTPException(502) on any non-success status. Overwrites silently
    if the file already exists (PUT semantics).
    """
    url = _full_url(relative_path)
    client = _get_client()
    resp = await client.put(
        url,
        auth=_auth(),
        content=content,
        headers={"Content-Type": mime_type},
    )
    # 201 = created, 204 = overwritten; some servers return 200.
    if resp.status_code not in (200, 201, 204):
        raise HTTPException(status_code=502, detail=f"Nextcloud upload failed: {resp.status_code}")
    return relative_path
|
||||
|
||||
|
||||
async def download_file(relative_path: str) -> tuple[bytes, str]:
    """
    GET a file from Nextcloud. Returns (bytes, mime_type).

    Raises HTTPException(404) when missing, HTTPException(502) on any other
    failure. Note: the whole file is buffered in memory.
    """
    url = _full_url(relative_path)
    client = _get_client()
    resp = await client.get(url, auth=_auth())
    if resp.status_code == 404:
        raise HTTPException(status_code=404, detail="File not found in Nextcloud")
    if resp.status_code != 200:
        raise HTTPException(status_code=502, detail=f"Nextcloud download failed: {resp.status_code}")
    # Strip any "; charset=..." parameter from the Content-Type header.
    mime = resp.headers.get("content-type", "application/octet-stream").split(";")[0].strip()
    return resp.content, mime
|
||||
|
||||
|
||||
async def delete_file(relative_path: str) -> None:
    """
    DELETE a file from Nextcloud.

    A 404 is treated as success (already gone — idempotent delete); any
    other non-success status raises HTTPException(502).
    """
    url = _full_url(relative_path)
    client = _get_client()
    resp = await client.request("DELETE", url, auth=_auth())
    if resp.status_code not in (200, 204, 404):
        raise HTTPException(status_code=502, detail=f"Nextcloud delete failed: {resp.status_code}")
|
||||
305
backend/crm/nextcloud_router.py
Normal file
305
backend/crm/nextcloud_router.py
Normal file
@@ -0,0 +1,305 @@
|
||||
"""
|
||||
Nextcloud WebDAV proxy endpoints.
|
||||
|
||||
Folder convention (all paths relative to nextcloud_base_path = BellSystems/Console):
|
||||
customers/{folder_id}/media/
|
||||
customers/{folder_id}/documents/
|
||||
customers/{folder_id}/sent/
|
||||
customers/{folder_id}/received/
|
||||
|
||||
folder_id = customer.folder_id if set, else customer.id (legacy fallback).
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, Query, UploadFile, File, Form, Response, HTTPException, Request
|
||||
from typing import Optional
|
||||
|
||||
from jose import JWTError
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from auth.utils import decode_access_token
|
||||
from crm import nextcloud, service
|
||||
from crm.models import MediaCreate, MediaDirection
|
||||
|
||||
router = APIRouter(prefix="/api/crm/nextcloud", tags=["crm-nextcloud"])
|
||||
|
||||
# Map an upload subfolder name to the MediaDirection recorded in crm_media.
# "media" and "documents" hold internal-only files.
DIRECTION_MAP = {
    "sent": MediaDirection.sent,
    "received": MediaDirection.received,
    "internal": MediaDirection.internal,
    "media": MediaDirection.internal,
    "documents": MediaDirection.internal,
}
|
||||
|
||||
|
||||
@router.get("/browse")
async def browse(
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List immediate children of a Nextcloud folder (Depth:1 PROPFIND)."""
    items = await nextcloud.list_folder(path)
    return {"path": path, "items": items}
|
||||
|
||||
|
||||
@router.get("/browse-all")
async def browse_all(
    customer_id: str = Query(..., description="Customer ID"),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """
    Recursively list ALL files for a customer across all subfolders and any depth.
    Uses Depth:infinity (one WebDAV call) with automatic fallback to recursive Depth:1.
    Each file item includes a 'subfolder' key derived from its path.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"

    all_files = await nextcloud.list_folder_recursive(base)

    # Tag each file with the top-level subfolder it lives under
    for item in all_files:
        parts = item["path"].split("/")
        # path looks like: customers/{nc_path}/{subfolder}/[...]/filename
        # parts[0]=customers, parts[1]={nc_path}, parts[2]={subfolder}
        item["subfolder"] = parts[2] if len(parts) > 2 else "other"

    return {"items": all_files}
|
||||
|
||||
|
||||
@router.get("/file")
async def proxy_file(
    request: Request,
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    token: Optional[str] = Query(None, description="JWT token for browser-native requests (img src, video src, a href) that cannot send an Authorization header"),
):
    """
    Stream a file from Nextcloud through the backend (proxy).

    Supports HTTP Range requests — including suffix ranges like "bytes=-500"
    (the last 500 bytes) — so videos can be seeked and start playing
    immediately. Unsatisfiable ranges get a proper 416 response.
    Auth is via the ?token= query param because <img>/<video>/<a> elements
    cannot attach an Authorization header.
    """
    if token is None:
        raise HTTPException(status_code=403, detail="Not authenticated")
    try:
        decode_access_token(token)
    except (JWTError, KeyError):
        raise HTTPException(status_code=403, detail="Invalid token")

    content, mime_type = await nextcloud.download_file(path)
    total = len(content)

    range_header = request.headers.get("range")
    if range_header and range_header.startswith("bytes="):
        # Parse "bytes=start-end" / "bytes=start-" / "bytes=-suffix"
        try:
            start_str, _, end_str = range_header[6:].partition("-")
            if not start_str:
                # Suffix range "bytes=-N": the LAST N bytes of the file
                # (previously mis-served as starting at offset 0).
                start = max(0, total - int(end_str))
                end = total - 1
            else:
                start = int(start_str)
                end = int(end_str) if end_str else total - 1
            end = min(end, total - 1)
            if start > end or start >= total:
                # RFC 7233: report an unsatisfiable range instead of
                # returning a 206 with an empty/garbage slice.
                return Response(
                    status_code=416,
                    headers={"Content-Range": f"bytes */{total}"},
                )
            chunk = content[start:end + 1]
            headers = {
                "Content-Range": f"bytes {start}-{end}/{total}",
                "Accept-Ranges": "bytes",
                "Content-Length": str(len(chunk)),
                "Content-Type": mime_type,
            }
            return Response(content=chunk, status_code=206, headers=headers, media_type=mime_type)
        except (ValueError, IndexError):
            # Malformed Range header — fall through to a full 200 response.
            pass

    return Response(
        content=content,
        media_type=mime_type,
        headers={"Accept-Ranges": "bytes", "Content-Length": str(total)},
    )
|
||||
|
||||
|
||||
@router.put("/file-put")
async def put_file(
    request: Request,
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    token: Optional[str] = Query(None),
):
    """
    Overwrite a file in Nextcloud with a new body (used for TXT in-browser editing).
    Auth via ?token= query param (same pattern as /file GET).

    The raw request body is written verbatim; Content-Type defaults to
    text/plain when the client omits it.
    """
    if token is None:
        raise HTTPException(status_code=403, detail="Not authenticated")
    try:
        decode_access_token(token)
    except (JWTError, KeyError):
        raise HTTPException(status_code=403, detail="Invalid token")

    body = await request.body()
    content_type = request.headers.get("content-type", "text/plain")
    await nextcloud.upload_file(path, body, content_type)
    return {"updated": path}
|
||||
|
||||
|
||||
@router.post("/upload")
async def upload_file(
    file: UploadFile = File(...),
    customer_id: str = Form(...),
    subfolder: str = Form("media"),  # "media" | "documents" | "sent" | "received"
    direction: Optional[str] = Form(None),
    tags: Optional[str] = Form(None),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Upload a file to the customer's Nextcloud folder and record it in crm_media.
    Uses the customer's folder_id as the NC path (falls back to UUID for legacy records).

    `tags` is a comma-separated string; blank segments (e.g. from "a,,b" or a
    trailing comma) are dropped.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)

    target_folder = f"customers/{nc_path}/{subfolder}"
    file_path = f"{target_folder}/{file.filename}"

    # Ensure the target subfolder exists (idempotent, fast for existing folders)
    await nextcloud.ensure_folder(target_folder)

    # Read and upload
    content = await file.read()
    mime_type = file.content_type or "application/octet-stream"
    await nextcloud.upload_file(file_path, content, mime_type)

    # Resolve direction: an explicit valid value wins, otherwise infer
    # from the subfolder (defaulting to internal).
    resolved_direction = None
    if direction:
        try:
            resolved_direction = MediaDirection(direction)
        except ValueError:
            resolved_direction = DIRECTION_MAP.get(subfolder, MediaDirection.internal)
    else:
        resolved_direction = DIRECTION_MAP.get(subfolder, MediaDirection.internal)

    # Save metadata record. Filter out empty tag segments so "a,,b" or a
    # trailing comma doesn't produce "" tags in the DB.
    tag_list = [t.strip() for t in tags.split(",") if t.strip()] if tags else []
    media_record = await service.create_media(MediaCreate(
        customer_id=customer_id,
        filename=file.filename,
        nextcloud_path=file_path,
        mime_type=mime_type,
        direction=resolved_direction,
        tags=tag_list,
        uploaded_by=_user.name,
    ))

    return media_record
|
||||
|
||||
|
||||
@router.delete("/file")
async def delete_file(
    path: str = Query(..., description="Path relative to nextcloud_base_path"),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a file from Nextcloud and remove the matching crm_media record if found."""
    await nextcloud.delete_file(path)

    # Best-effort: delete the DB record if one matches this path.
    # Failures here are swallowed on purpose — the NC delete already happened.
    media_list = await service.list_media()
    for m in media_list:
        if m.nextcloud_path == path:
            try:
                await service.delete_media(m.id)
            except Exception:
                pass
            break

    return {"deleted": path}
|
||||
|
||||
|
||||
@router.post("/init-customer-folder")
async def init_customer_folder(
    customer_id: str = Form(...),
    customer_name: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Create the standard folder structure for a customer in Nextcloud
    and write an _info.txt stub for human readability.

    Idempotent: ensure_folder silently succeeds on existing folders.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"
    # Standard subfolders per the module-level folder convention.
    for sub in ("media", "documents", "sent", "received"):
        await nextcloud.ensure_folder(f"{base}/{sub}")
    await nextcloud.write_info_file(base, customer_name, customer_id)
    return {"initialized": base}
|
||||
|
||||
|
||||
@router.post("/sync")
async def sync_nextcloud_files(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Scan the customer's Nextcloud folder and register any files not yet tracked in the DB.
    Returns counts of newly synced and skipped (already tracked) files.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"

    # Collect all NC files recursively (handles nested folders at any depth)
    all_nc_files = await nextcloud.list_folder_recursive(base)
    for item in all_nc_files:
        # path looks like: customers/{nc_path}/{subfolder}/... — index 2 is
        # the top-level subfolder, used below to infer direction.
        parts = item["path"].split("/")
        item["_subfolder"] = parts[2] if len(parts) > 2 else "media"

    # Get existing DB records for this customer
    existing = await service.list_media(customer_id=customer_id)
    tracked_paths = {m.nextcloud_path for m in existing}

    synced = 0
    skipped = 0
    for f in all_nc_files:
        if f["path"] in tracked_paths:
            skipped += 1
            continue
        sub = f["_subfolder"]
        direction = DIRECTION_MAP.get(sub, MediaDirection.internal)
        await service.create_media(MediaCreate(
            customer_id=customer_id,
            filename=f["filename"],
            nextcloud_path=f["path"],
            mime_type=f.get("mime_type") or "application/octet-stream",
            direction=direction,
            tags=[],
            uploaded_by="nextcloud-sync",
        ))
        synced += 1

    return {"synced": synced, "skipped": skipped}
|
||||
|
||||
|
||||
@router.post("/untrack-deleted")
async def untrack_deleted_files(
    customer_id: str = Form(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Remove DB records for files that no longer exist in Nextcloud.
    Returns count of untracked records.
    """
    customer = service.get_customer(customer_id)
    nc_path = service.get_customer_nc_path(customer)
    base = f"customers/{nc_path}"

    # Collect all NC file paths recursively
    all_nc_files = await nextcloud.list_folder_recursive(base)
    nc_paths = {item["path"] for item in all_nc_files}

    # Find DB records whose NC path no longer exists.
    # Delete failures are swallowed (best-effort cleanup).
    existing = await service.list_media(customer_id=customer_id)
    untracked = 0
    for m in existing:
        if m.nextcloud_path and m.nextcloud_path not in nc_paths:
            try:
                await service.delete_media(m.id)
                untracked += 1
            except Exception:
                pass

    return {"untracked": untracked}
|
||||
57
backend/crm/orders_router.py
Normal file
57
backend/crm/orders_router.py
Normal file
@@ -0,0 +1,57 @@
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from typing import Optional
|
||||
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from crm.models import OrderCreate, OrderUpdate, OrderInDB, OrderListResponse
|
||||
from crm import service
|
||||
|
||||
router = APIRouter(prefix="/api/crm/orders", tags=["crm-orders"])
|
||||
|
||||
|
||||
@router.get("", response_model=OrderListResponse)
def list_orders(
    customer_id: Optional[str] = Query(None),
    status: Optional[str] = Query(None),
    payment_status: Optional[str] = Query(None),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List orders, optionally filtered by customer, status, and payment status."""
    orders = service.list_orders(
        customer_id=customer_id,
        status=status,
        payment_status=payment_status,
    )
    return OrderListResponse(orders=orders, total=len(orders))
|
||||
|
||||
|
||||
@router.get("/{order_id}", response_model=OrderInDB)
def get_order(
    order_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch a single order by id (service layer handles not-found)."""
    return service.get_order(order_id)
|
||||
|
||||
|
||||
@router.post("", response_model=OrderInDB, status_code=201)
def create_order(
    body: OrderCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a new order; returns the stored record with 201."""
    return service.create_order(body)
|
||||
|
||||
|
||||
@router.put("/{order_id}", response_model=OrderInDB)
def update_order(
    order_id: str,
    body: OrderUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Partially update an order; returns the updated record."""
    return service.update_order(order_id, body)
|
||||
|
||||
|
||||
@router.delete("/{order_id}", status_code=204)
def delete_order(
    order_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete an order; 204 with no body on success."""
    service.delete_order(order_id)
|
||||
141
backend/crm/quotation_models.py
Normal file
141
backend/crm/quotation_models.py
Normal file
@@ -0,0 +1,141 @@
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class QuotationStatus(str, Enum):
    """Lifecycle state of a quotation."""
    draft = "draft"
    sent = "sent"
    accepted = "accepted"
    rejected = "rejected"
||||
|
||||
|
||||
class QuotationItemCreate(BaseModel):
    """One line item on a quotation (create/update payload)."""
    product_id: Optional[str] = None  # free-text items have no product_id
    description: Optional[str] = None
    unit_type: str = "pcs"  # pcs / kg / m
    unit_cost: float = 0.0
    discount_percent: float = 0.0  # per-line discount, 0-100
    quantity: float = 1.0
    vat_percent: float = 24.0  # default Greek VAT rate
    sort_order: int = 0  # display position within the quotation
|
||||
|
||||
|
||||
class QuotationItemInDB(QuotationItemCreate):
    """Stored line item: create fields plus identity and the computed total."""
    id: str
    quotation_id: str
    line_total: float = 0.0  # computed server-side — do not trust client input
|
||||
|
||||
|
||||
class QuotationCreate(BaseModel):
    """Payload for creating a quotation; totals are computed server-side."""
    customer_id: str
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: str = "en"  # en / gr
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    # Global (whole-quotation) discount, applied on top of per-line discounts.
    global_discount_label: Optional[str] = None
    global_discount_percent: float = 0.0
    shipping_cost: float = 0.0
    shipping_cost_discount: float = 0.0
    install_cost: float = 0.0
    install_cost_discount: float = 0.0
    extras_label: Optional[str] = None
    extras_cost: float = 0.0
    comments: List[str] = []
    quick_notes: Optional[Dict[str, Any]] = None
    items: List[QuotationItemCreate] = []
    # Client override fields (for this quotation only; customer record is not modified)
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
|
||||
|
||||
|
||||
class QuotationUpdate(BaseModel):
    """Partial-update payload: every field optional; None means 'leave unchanged'."""
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: Optional[str] = None
    status: Optional[QuotationStatus] = None
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: Optional[float] = None
    shipping_cost: Optional[float] = None
    shipping_cost_discount: Optional[float] = None
    install_cost: Optional[float] = None
    install_cost_discount: Optional[float] = None
    extras_label: Optional[str] = None
    extras_cost: Optional[float] = None
    comments: Optional[List[str]] = None
    quick_notes: Optional[Dict[str, Any]] = None
    # When provided, presumably replaces the full item list — confirm in service.
    items: Optional[List[QuotationItemCreate]] = None
    # Client override fields
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
|
||||
|
||||
|
||||
class QuotationInDB(BaseModel):
    """Full stored quotation, including server-computed totals and PDF location."""
    id: str
    quotation_number: str  # human-facing sequential number
    customer_id: str
    title: Optional[str] = None
    subtitle: Optional[str] = None
    language: str = "en"
    status: QuotationStatus = QuotationStatus.draft
    order_type: Optional[str] = None
    shipping_method: Optional[str] = None
    estimated_shipping_date: Optional[str] = None
    global_discount_label: Optional[str] = None
    global_discount_percent: float = 0.0
    shipping_cost: float = 0.0
    shipping_cost_discount: float = 0.0
    install_cost: float = 0.0
    install_cost_discount: float = 0.0
    extras_label: Optional[str] = None
    extras_cost: float = 0.0
    comments: List[str] = []
    quick_notes: Dict[str, Any] = {}
    # Computed totals (server-side):
    subtotal_before_discount: float = 0.0
    global_discount_amount: float = 0.0
    new_subtotal: float = 0.0
    vat_amount: float = 0.0
    final_total: float = 0.0
    # Location of the generated PDF in Nextcloud, if generated.
    nextcloud_pdf_path: Optional[str] = None
    nextcloud_pdf_url: Optional[str] = None
    created_at: str
    updated_at: str
    items: List[QuotationItemInDB] = []
    # Client override fields
    client_org: Optional[str] = None
    client_name: Optional[str] = None
    client_location: Optional[str] = None
    client_phone: Optional[str] = None
    client_email: Optional[str] = None
|
||||
|
||||
|
||||
class QuotationListItem(BaseModel):
    """Compact quotation summary for list views (no line items or totals breakdown)."""
    id: str
    quotation_number: str
    title: Optional[str] = None
    customer_id: str
    status: QuotationStatus
    final_total: float
    created_at: str
    updated_at: str
    nextcloud_pdf_url: Optional[str] = None
|
||||
|
||||
|
||||
class QuotationListResponse(BaseModel):
    """List wrapper for quotation summaries; total is the item count."""
    quotations: List[QuotationListItem]
    total: int
|
||||
|
||||
|
||||
class NextNumberResponse(BaseModel):
    """Preview of the next quotation number (not reserved/committed)."""
    next_number: str
|
||||
101
backend/crm/quotations_router.py
Normal file
101
backend/crm/quotations_router.py
Normal file
@@ -0,0 +1,101 @@
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from fastapi.responses import StreamingResponse
|
||||
from typing import Optional
|
||||
import io
|
||||
|
||||
from auth.dependencies import require_permission
|
||||
from auth.models import TokenPayload
|
||||
from crm.quotation_models import (
|
||||
NextNumberResponse,
|
||||
QuotationCreate,
|
||||
QuotationInDB,
|
||||
QuotationListResponse,
|
||||
QuotationUpdate,
|
||||
)
|
||||
from crm import quotations_service as svc
|
||||
|
||||
router = APIRouter(prefix="/api/crm/quotations", tags=["crm-quotations"])
|
||||
|
||||
|
||||
# IMPORTANT: Static paths must come BEFORE /{id} to avoid route collision in FastAPI
|
||||
|
||||
@router.get("/next-number", response_model=NextNumberResponse)
async def get_next_number(
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Returns the next available quotation number (preview only — does not commit)."""
    # Note: two concurrent callers may see the same preview number;
    # the actual number is assigned at creation time.
    next_num = await svc.get_next_number()
    return NextNumberResponse(next_number=next_num)
|
||||
|
||||
|
||||
@router.get("/customer/{customer_id}", response_model=QuotationListResponse)
async def list_quotations_for_customer(
    customer_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List all quotations belonging to a single customer."""
    quotations = await svc.list_quotations(customer_id)
    return QuotationListResponse(quotations=quotations, total=len(quotations))
|
||||
|
||||
|
||||
@router.get("/{quotation_id}/pdf")
async def proxy_quotation_pdf(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Proxy the quotation PDF from Nextcloud to bypass browser cookie restrictions."""
    pdf_bytes = await svc.get_quotation_pdf_bytes(quotation_id)
    # "inline" so browsers render the PDF instead of downloading it.
    return StreamingResponse(
        io.BytesIO(pdf_bytes),
        media_type="application/pdf",
        headers={"Content-Disposition": "inline"},
    )
|
||||
|
||||
|
||||
@router.get("/{quotation_id}", response_model=QuotationInDB)
async def get_quotation(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch a full quotation, including line items and computed totals."""
    return await svc.get_quotation(quotation_id)
|
||||
|
||||
|
||||
@router.post("", response_model=QuotationInDB, status_code=201)
async def create_quotation(
    body: QuotationCreate,
    generate_pdf: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """
    Create a quotation. Pass ?generate_pdf=true to immediately generate and upload the PDF.
    """
    return await svc.create_quotation(body, generate_pdf=generate_pdf)
|
||||
|
||||
|
||||
@router.put("/{quotation_id}", response_model=QuotationInDB)
async def update_quotation(
    quotation_id: str,
    body: QuotationUpdate,
    generate_pdf: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Update a quotation; with ?generate_pdf=true the PDF is regenerated."""
    updated = await svc.update_quotation(quotation_id, body, generate_pdf=generate_pdf)
    return updated
|
||||
|
||||
|
||||
@router.delete("/{quotation_id}", status_code=204)
async def delete_quotation(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a quotation (service also removes its items and Nextcloud PDF)."""
    await svc.delete_quotation(quotation_id)
|
||||
|
||||
|
||||
@router.post("/{quotation_id}/regenerate-pdf", response_model=QuotationInDB)
async def regenerate_pdf(
    quotation_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Force PDF regeneration and re-upload to Nextcloud."""
    refreshed = await svc.regenerate_pdf(quotation_id)
    return refreshed
|
||||
494
backend/crm/quotations_service.py
Normal file
494
backend/crm/quotations_service.py
Normal file
@@ -0,0 +1,494 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from decimal import Decimal, ROUND_HALF_UP
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import HTTPException
|
||||
|
||||
from crm import nextcloud
|
||||
from crm.quotation_models import (
|
||||
QuotationCreate,
|
||||
QuotationInDB,
|
||||
QuotationItemCreate,
|
||||
QuotationItemInDB,
|
||||
QuotationListItem,
|
||||
QuotationUpdate,
|
||||
)
|
||||
from crm.service import get_customer
|
||||
from mqtt import database as mqtt_db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Path to Jinja2 templates directory (relative to this file)
|
||||
_TEMPLATES_DIR = Path(__file__).parent.parent / "templates"
|
||||
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _d(value) -> Decimal:
|
||||
"""Convert to Decimal safely."""
|
||||
return Decimal(str(value if value is not None else 0))
|
||||
|
||||
|
||||
def _float(d: Decimal) -> float:
|
||||
"""Round Decimal to 2dp and return as float for storage."""
|
||||
return float(d.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))
|
||||
|
||||
|
||||
def _calculate_totals(
    items: list,
    global_discount_percent: float,
    shipping_cost: float,
    shipping_cost_discount: float,
    install_cost: float,
    install_cost_discount: float,
    extras_cost: float,
) -> dict:
    """
    Calculate all monetary totals with Decimal arithmetic (ROUND_HALF_UP at output).

    VAT is accumulated per item from each item's ``vat_percent``; shipping and
    install carry 0% VAT. The global discount proportionally scales the VAT as
    well. Extras are added after VAT. Returns a dict of floats for DB storage.
    """
    # Local helpers (equivalent to the module-level _d/_float) so the math is
    # self-contained here.
    def to_dec(value) -> Decimal:
        return Decimal(str(value if value is not None else 0))

    def money(d: Decimal) -> float:
        return float(d.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))

    # Per-line net totals plus accumulated per-item VAT.
    lines_net = Decimal(0)
    item_vat = Decimal(0)
    for entry in items:
        net = (
            to_dec(entry.get("unit_cost", 0))
            * to_dec(entry.get("quantity", 1))
            * (1 - to_dec(entry.get("discount_percent", 0)) / 100)
        )
        lines_net += net
        item_vat += net * (to_dec(entry.get("vat_percent", 24)) / 100)

    # Shipping and install nets (both VAT-free).
    ship_net = to_dec(shipping_cost) * (1 - to_dec(shipping_cost_discount) / 100)
    install_net = to_dec(install_cost) * (1 - to_dec(install_cost_discount) / 100)

    subtotal = lines_net + ship_net + install_net

    global_disc_amount = subtotal * (to_dec(global_discount_percent) / 100)
    new_subtotal = subtotal - global_disc_amount

    # The global discount also reduces the VAT proportionally.
    if subtotal > 0:
        disc_ratio = new_subtotal / subtotal
        vat_amount = item_vat * disc_ratio
    else:
        vat_amount = Decimal(0)

    final_total = new_subtotal + vat_amount + to_dec(extras_cost)

    return {
        "subtotal_before_discount": money(subtotal),
        "global_discount_amount": money(global_disc_amount),
        "new_subtotal": money(new_subtotal),
        "vat_amount": money(vat_amount),
        "final_total": money(final_total),
    }
|
||||
|
||||
|
||||
def _calc_line_total(item) -> float:
    """Net line total: unit_cost * quantity * (1 - discount%), rounded to 2dp half-up."""
    def to_dec(value):
        return Decimal(str(value if value is not None else 0))

    total = (
        to_dec(item.get("unit_cost", 0))
        * to_dec(item.get("quantity", 1))
        * (1 - to_dec(item.get("discount_percent", 0)) / 100)
    )
    return float(total.quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))
|
||||
|
||||
|
||||
async def _generate_quotation_number(db) -> str:
|
||||
year = datetime.utcnow().year
|
||||
prefix = f"QT-{year}-"
|
||||
rows = await db.execute_fetchall(
|
||||
"SELECT quotation_number FROM crm_quotations WHERE quotation_number LIKE ? ORDER BY quotation_number DESC LIMIT 1",
|
||||
(f"{prefix}%",),
|
||||
)
|
||||
if rows:
|
||||
last_num = rows[0][0] # e.g. "QT-2026-012"
|
||||
try:
|
||||
seq = int(last_num[len(prefix):]) + 1
|
||||
except ValueError:
|
||||
seq = 1
|
||||
else:
|
||||
seq = 1
|
||||
return f"{prefix}{seq:03d}"
|
||||
|
||||
|
||||
def _row_to_quotation(row: dict, items: list[dict]) -> QuotationInDB:
    """Hydrate a quotation DB row plus its item rows into a QuotationInDB model.

    JSON text columns (comments, quick_notes) are decoded; unknown columns are
    dropped by filtering against the models' declared fields.
    """
    record = dict(row)
    record["comments"] = json.loads(record.get("comments") or "[]")
    record["quick_notes"] = json.loads(record.get("quick_notes") or "{}")
    item_fields = QuotationItemInDB.model_fields
    parsed_items = [
        QuotationItemInDB(**{k: v for k, v in raw.items() if k in item_fields})
        for raw in items
    ]
    head_fields = QuotationInDB.model_fields
    return QuotationInDB(
        **{k: v for k, v in record.items() if k in head_fields},
        items=parsed_items,
    )
|
||||
|
||||
|
||||
def _row_to_list_item(row: dict) -> QuotationListItem:
    """Map a summary DB row onto QuotationListItem, dropping unknown columns."""
    allowed = QuotationListItem.model_fields
    return QuotationListItem(**{k: v for k, v in dict(row).items() if k in allowed})
|
||||
|
||||
|
||||
async def _fetch_items(db, quotation_id: str) -> list[dict]:
|
||||
rows = await db.execute_fetchall(
|
||||
"SELECT * FROM crm_quotation_items WHERE quotation_id = ? ORDER BY sort_order ASC",
|
||||
(quotation_id,),
|
||||
)
|
||||
return [dict(r) for r in rows]
|
||||
|
||||
|
||||
# ── Public API ────────────────────────────────────────────────────────────────
|
||||
|
||||
async def get_next_number() -> str:
    """Preview the next quotation number without reserving it."""
    conn = await mqtt_db.get_db()
    return await _generate_quotation_number(conn)
|
||||
|
||||
|
||||
async def list_quotations(customer_id: str) -> list[QuotationListItem]:
    """Return summary rows for all of a customer's quotations, newest first."""
    conn = await mqtt_db.get_db()
    rows = await conn.execute_fetchall(
        "SELECT id, quotation_number, title, customer_id, status, final_total, created_at, updated_at, nextcloud_pdf_url "
        "FROM crm_quotations WHERE customer_id = ? ORDER BY created_at DESC",
        (customer_id,),
    )
    return [_row_to_list_item(dict(row)) for row in rows]
|
||||
|
||||
|
||||
async def get_quotation(quotation_id: str) -> QuotationInDB:
    """Load a quotation and its items; raise HTTP 404 when it does not exist."""
    conn = await mqtt_db.get_db()
    rows = await conn.execute_fetchall(
        "SELECT * FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")
    item_rows = await _fetch_items(conn, quotation_id)
    return _row_to_quotation(dict(rows[0]), item_rows)
|
||||
|
||||
|
||||
async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) -> QuotationInDB:
    """Create a quotation with its line items and computed totals.

    Assigns a fresh UUID and the next sequential quotation number, computes
    per-line totals and the document-level totals, inserts the header row and
    one row per item, commits, and (optionally) generates/uploads the PDF.

    Args:
        data: Validated creation payload including the item list.
        generate_pdf: When True, immediately render and upload the PDF
            (best-effort — see _do_generate_and_upload_pdf).

    Returns:
        The freshly persisted quotation, re-read from the DB.
    """
    db = await mqtt_db.get_db()
    now = datetime.utcnow().isoformat()
    qid = str(uuid.uuid4())
    # NOTE(review): number generation and the insert below are not atomic —
    # two concurrent creates could race to the same number; confirm whether a
    # UNIQUE constraint on quotation_number exists.
    quotation_number = await _generate_quotation_number(db)

    # Build items list for calculation
    items_raw = [item.model_dump() for item in data.items]

    # Calculate per-item line totals
    for item in items_raw:
        item["line_total"] = _calc_line_total(item)

    totals = _calculate_totals(
        items_raw,
        data.global_discount_percent,
        data.shipping_cost,
        data.shipping_cost_discount,
        data.install_cost,
        data.install_cost_discount,
        data.extras_cost,
    )

    # comments / quick_notes are stored as JSON text columns.
    comments_json = json.dumps(data.comments)
    quick_notes_json = json.dumps(data.quick_notes or {})

    # Header row; status always starts as 'draft', PDF columns start NULL.
    await db.execute(
        """INSERT INTO crm_quotations (
            id, quotation_number, title, subtitle, customer_id,
            language, status, order_type, shipping_method, estimated_shipping_date,
            global_discount_label, global_discount_percent,
            shipping_cost, shipping_cost_discount, install_cost, install_cost_discount,
            extras_label, extras_cost, comments, quick_notes,
            subtotal_before_discount, global_discount_amount, new_subtotal, vat_amount, final_total,
            nextcloud_pdf_path, nextcloud_pdf_url,
            client_org, client_name, client_location, client_phone, client_email,
            created_at, updated_at
        ) VALUES (
            ?, ?, ?, ?, ?,
            ?, 'draft', ?, ?, ?,
            ?, ?,
            ?, ?, ?, ?,
            ?, ?, ?, ?,
            ?, ?, ?, ?, ?,
            NULL, NULL,
            ?, ?, ?, ?, ?,
            ?, ?
        )""",
        (
            qid, quotation_number, data.title, data.subtitle, data.customer_id,
            data.language, data.order_type, data.shipping_method, data.estimated_shipping_date,
            data.global_discount_label, data.global_discount_percent,
            data.shipping_cost, data.shipping_cost_discount, data.install_cost, data.install_cost_discount,
            data.extras_label, data.extras_cost, comments_json, quick_notes_json,
            totals["subtotal_before_discount"], totals["global_discount_amount"],
            totals["new_subtotal"], totals["vat_amount"], totals["final_total"],
            data.client_org, data.client_name, data.client_location, data.client_phone, data.client_email,
            now, now,
        ),
    )

    # Insert items; sort_order defaults to the list position when not given.
    for i, item in enumerate(items_raw):
        item_id = str(uuid.uuid4())
        await db.execute(
            """INSERT INTO crm_quotation_items
               (id, quotation_id, product_id, description, unit_type, unit_cost,
                discount_percent, quantity, vat_percent, line_total, sort_order)
               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            (
                item_id, qid, item.get("product_id"), item.get("description"),
                item.get("unit_type", "pcs"), item.get("unit_cost", 0),
                item.get("discount_percent", 0), item.get("quantity", 1),
                item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
            ),
        )

    await db.commit()

    quotation = await get_quotation(qid)

    if generate_pdf:
        # Best-effort: on PDF failure the quotation is still returned unchanged.
        quotation = await _do_generate_and_upload_pdf(quotation)

    return quotation
|
||||
|
||||
|
||||
async def update_quotation(quotation_id: str, data: QuotationUpdate, generate_pdf: bool = False) -> QuotationInDB:
    """Partially update a quotation, recompute its totals, optionally regenerate the PDF.

    Only fields present (and non-None) in *data* are written. NOTE(review):
    because of exclude_none=True below, a field can never be explicitly
    cleared back to NULL through this function — confirm that is intended.

    Raises:
        HTTPException 404: quotation does not exist.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT * FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")

    existing = dict(rows[0])
    now = datetime.utcnow().isoformat()

    # Merge update into existing values
    update_fields = data.model_dump(exclude_none=True)

    # Build SET clause — handle comments JSON separately
    set_parts = []
    params = []

    scalar_fields = [
        "title", "subtitle", "language", "status", "order_type", "shipping_method",
        "estimated_shipping_date", "global_discount_label", "global_discount_percent",
        "shipping_cost", "shipping_cost_discount", "install_cost",
        "install_cost_discount", "extras_label", "extras_cost",
        "client_org", "client_name", "client_location", "client_phone", "client_email",
    ]

    for field in scalar_fields:
        if field in update_fields:
            set_parts.append(f"{field} = ?")
            params.append(update_fields[field])

    # comments / quick_notes are stored as JSON text columns.
    if "comments" in update_fields:
        set_parts.append("comments = ?")
        params.append(json.dumps(update_fields["comments"]))

    if "quick_notes" in update_fields:
        set_parts.append("quick_notes = ?")
        params.append(json.dumps(update_fields["quick_notes"] or {}))

    # Recalculate totals using merged values
    merged = {**existing, **{k: update_fields.get(k, existing.get(k)) for k in scalar_fields}}

    # If items are being updated, recalculate with new items; otherwise use existing items
    if "items" in update_fields:
        items_raw = [item.model_dump() for item in data.items]
        for item in items_raw:
            item["line_total"] = _calc_line_total(item)
    else:
        existing_items = await _fetch_items(db, quotation_id)
        items_raw = existing_items

    # NOTE(review): float(None) would raise TypeError here if any cost column
    # holds NULL in a legacy row — confirm the schema enforces non-NULL defaults.
    totals = _calculate_totals(
        items_raw,
        float(merged.get("global_discount_percent", 0)),
        float(merged.get("shipping_cost", 0)),
        float(merged.get("shipping_cost_discount", 0)),
        float(merged.get("install_cost", 0)),
        float(merged.get("install_cost_discount", 0)),
        float(merged.get("extras_cost", 0)),
    )

    # Totals and updated_at are always rewritten, so set_parts is never empty.
    for field, val in totals.items():
        set_parts.append(f"{field} = ?")
        params.append(val)

    set_parts.append("updated_at = ?")
    params.append(now)
    params.append(quotation_id)

    if set_parts:
        await db.execute(
            f"UPDATE crm_quotations SET {', '.join(set_parts)} WHERE id = ?",
            params,
        )

    # Replace items if provided (full delete-and-reinsert).
    if "items" in update_fields:
        await db.execute("DELETE FROM crm_quotation_items WHERE quotation_id = ?", (quotation_id,))
        for i, item in enumerate(items_raw):
            item_id = str(uuid.uuid4())
            await db.execute(
                """INSERT INTO crm_quotation_items
                   (id, quotation_id, product_id, description, unit_type, unit_cost,
                    discount_percent, quantity, vat_percent, line_total, sort_order)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    item_id, quotation_id, item.get("product_id"), item.get("description"),
                    item.get("unit_type", "pcs"), item.get("unit_cost", 0),
                    item.get("discount_percent", 0), item.get("quantity", 1),
                    item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
                ),
            )

    await db.commit()

    quotation = await get_quotation(quotation_id)

    if generate_pdf:
        # Best-effort: on PDF failure the updated quotation is still returned.
        quotation = await _do_generate_and_upload_pdf(quotation)

    return quotation
|
||||
|
||||
|
||||
async def delete_quotation(quotation_id: str) -> None:
    """Delete a quotation, its line items, and (best-effort) its Nextcloud PDF.

    Raises:
        HTTPException 404: quotation does not exist.
    """
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT nextcloud_pdf_path FROM crm_quotations WHERE id = ?", (quotation_id,)
    )
    if not rows:
        raise HTTPException(status_code=404, detail="Quotation not found")

    pdf_path = dict(rows[0]).get("nextcloud_pdf_path")

    # DB rows go first so the API call succeeds even when Nextcloud is down.
    await db.execute("DELETE FROM crm_quotation_items WHERE quotation_id = ?", (quotation_id,))
    await db.execute("DELETE FROM crm_quotations WHERE id = ?", (quotation_id,))
    await db.commit()

    if not pdf_path:
        return
    try:
        await nextcloud.delete_file(pdf_path)
    except Exception as exc:
        logger.warning("Failed to delete PDF from Nextcloud (%s): %s", pdf_path, exc)
|
||||
|
||||
|
||||
# ── PDF Generation ─────────────────────────────────────────────────────────────
|
||||
|
||||
async def _do_generate_and_upload_pdf(quotation: QuotationInDB) -> QuotationInDB:
    """Generate PDF, upload to Nextcloud, update DB record. Returns updated quotation.

    Best-effort pipeline: every stage (customer lookup, rendering, upload)
    catches and logs its own failure and returns the quotation unchanged
    rather than raising, so callers always get a usable quotation back.
    """
    try:
        # NOTE(review): get_customer is a synchronous Firestore call executed
        # inside this async function — it blocks the event loop; confirm
        # whether it should be offloaded to a thread.
        customer = get_customer(quotation.customer_id)
    except Exception as e:
        logger.error("Cannot generate PDF — customer not found: %s", e)
        return quotation

    try:
        pdf_bytes = await _generate_pdf_bytes(quotation, customer)
    except Exception as e:
        logger.error("PDF generation failed for quotation %s: %s", quotation.id, e)
        return quotation

    # Delete old PDF if present (ignore failures — the upload replaces it anyway).
    if quotation.nextcloud_pdf_path:
        try:
            await nextcloud.delete_file(quotation.nextcloud_pdf_path)
        except Exception:
            pass

    try:
        pdf_path, pdf_url = await _upload_pdf(customer, quotation, pdf_bytes)
    except Exception as e:
        logger.error("PDF upload failed for quotation %s: %s", quotation.id, e)
        return quotation

    # Persist paths only after a successful upload.
    db = await mqtt_db.get_db()
    await db.execute(
        "UPDATE crm_quotations SET nextcloud_pdf_path = ?, nextcloud_pdf_url = ? WHERE id = ?",
        (pdf_path, pdf_url, quotation.id),
    )
    await db.commit()

    # Re-read so the returned model carries the new PDF columns.
    return await get_quotation(quotation.id)
|
||||
|
||||
|
||||
async def _generate_pdf_bytes(quotation: QuotationInDB, customer) -> bytes:
    """Render the Jinja2 quotation template and convert it to PDF via WeasyPrint."""
    # Imported lazily so the module loads even where these heavy deps are absent.
    from jinja2 import Environment, FileSystemLoader, select_autoescape
    import weasyprint

    jinja_env = Environment(
        loader=FileSystemLoader(str(_TEMPLATES_DIR)),
        autoescape=select_autoescape(["html"]),
    )

    def format_money(value):
        # Greek-style formatting: '.' thousands separator, ',' decimals.
        try:
            amount = float(value)
        except (TypeError, ValueError):
            return "0,00 €"
        swapped = f"{amount:,.2f}".replace(",", "X").replace(".", ",").replace("X", ".")
        return f"{swapped} €"

    jinja_env.filters["format_money"] = format_money

    rendered = jinja_env.get_template("quotation.html").render(
        quotation=quotation,
        customer=customer,
        lang=quotation.language,
    )

    return weasyprint.HTML(string=rendered, base_url=str(_TEMPLATES_DIR)).write_pdf()
|
||||
|
||||
|
||||
async def _upload_pdf(customer, quotation: QuotationInDB, pdf_bytes: bytes) -> tuple[str, str]:
    """Upload the PDF into the customer's quotations folder on Nextcloud.

    Returns:
        (relative_path, public_url) — the WebDAV-relative path stored in the
        DB and the direct download URL.

    Bug fix: the relative path previously ended in a literal placeholder
    instead of the generated ``filename``, so every upload for a customer
    wrote to the same object and the stored URL never pointed at a
    per-quotation document. The path now uses the dated filename. The unused
    ``settings`` import was also removed.
    """
    from crm.service import get_customer_nc_path

    nc_folder = get_customer_nc_path(customer)
    date_str = datetime.utcnow().strftime("%Y-%m-%d")
    filename = f"Quotation-{quotation.quotation_number}-{date_str}.pdf"
    folder_rel = f"customers/{nc_folder}/quotations"
    rel_path = f"{folder_rel}/{filename}"

    await nextcloud.ensure_folder(folder_rel)
    await nextcloud.upload_file(rel_path, pdf_bytes, "application/pdf")

    # Construct a direct WebDAV download URL
    from crm.nextcloud import _full_url
    pdf_url = _full_url(rel_path)

    return rel_path, pdf_url
|
||||
|
||||
|
||||
async def regenerate_pdf(quotation_id: str) -> QuotationInDB:
    """Re-render and re-upload the PDF for an existing quotation."""
    current = await get_quotation(quotation_id)
    return await _do_generate_and_upload_pdf(current)
|
||||
|
||||
|
||||
async def get_quotation_pdf_bytes(quotation_id: str) -> bytes:
    """Download the PDF for a quotation from Nextcloud and return raw bytes.

    Raises:
        HTTPException 404: quotation missing, or no PDF has been generated yet.
    """
    # HTTPException is already imported at module scope; the previous local
    # re-import shadowed it needlessly and has been removed.
    quotation = await get_quotation(quotation_id)
    if not quotation.nextcloud_pdf_path:
        raise HTTPException(status_code=404, detail="No PDF generated for this quotation")
    pdf_bytes, _ = await nextcloud.download_file(quotation.nextcloud_pdf_path)
    return pdf_bytes
|
||||
93
backend/crm/router.py
Normal file
93
backend/crm/router.py
Normal file
@@ -0,0 +1,93 @@
|
||||
from fastapi import APIRouter, Depends, Query, UploadFile, File, HTTPException
|
||||
from fastapi.responses import FileResponse
|
||||
from typing import Optional
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from crm.models import ProductCreate, ProductUpdate, ProductInDB, ProductListResponse
|
||||
from crm import service
|
||||
|
||||
router = APIRouter(prefix="/api/crm/products", tags=["crm-products"])
|
||||
|
||||
# On-disk location for uploaded product photos (backend/storage/product_images).
PHOTO_DIR = os.path.join(os.path.dirname(__file__), "..", "storage", "product_images")
# Created eagerly at import time so the upload/serve handlers can assume it exists.
os.makedirs(PHOTO_DIR, exist_ok=True)
|
||||
|
||||
|
||||
@router.get("", response_model=ProductListResponse)
def list_products(
    search: Optional[str] = Query(None),
    category: Optional[str] = Query(None),
    active_only: bool = Query(False),
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """List products with optional search/category/active filters."""
    matched = service.list_products(search=search, category=category, active_only=active_only)
    return ProductListResponse(products=matched, total=len(matched))
|
||||
|
||||
|
||||
@router.get("/{product_id}", response_model=ProductInDB)
def get_product(
    product_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "view")),
):
    """Fetch one product; the service layer handles not-found."""
    product = service.get_product(product_id)
    return product
|
||||
|
||||
|
||||
@router.post("", response_model=ProductInDB, status_code=201)
def create_product(
    body: ProductCreate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Create a new product from the validated payload."""
    created = service.create_product(body)
    return created
|
||||
|
||||
|
||||
@router.put("/{product_id}", response_model=ProductInDB)
def update_product(
    product_id: str,
    body: ProductUpdate,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Apply a partial update to an existing product."""
    updated = service.update_product(product_id, body)
    return updated
|
||||
|
||||
|
||||
@router.delete("/{product_id}", status_code=204)
def delete_product(
    product_id: str,
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Delete a product; the service layer raises if it does not exist."""
    service.delete_product(product_id)
|
||||
|
||||
|
||||
@router.post("/{product_id}/photo", response_model=ProductInDB)
async def upload_product_photo(
    product_id: str,
    file: UploadFile = File(...),
    _user: TokenPayload = Depends(require_permission("crm", "edit")),
):
    """Upload a product photo (JPG/PNG/WebP), stored on local disk.

    Replaces any previously stored photo regardless of extension, then points
    the product's photo_url at the photo-serving endpoint.

    Fixes: the old exists()/remove() pair raced with concurrent uploads
    (now try/except FileNotFoundError), and the upload's spooled temp file
    was never closed (now closed in a finally block).
    """
    if file.content_type not in ("image/jpeg", "image/png", "image/webp"):
        raise HTTPException(status_code=400, detail="Only JPG, PNG, or WebP images are accepted.")
    ext = {"image/jpeg": "jpg", "image/png": "png", "image/webp": "webp"}[file.content_type]
    photo_path = os.path.join(PHOTO_DIR, f"{product_id}.{ext}")
    # Remove stale photos with other extensions; tolerate concurrent removal.
    for old_ext in ("jpg", "png", "webp"):
        old_path = os.path.join(PHOTO_DIR, f"{product_id}.{old_ext}")
        if old_path != photo_path:
            try:
                os.remove(old_path)
            except FileNotFoundError:
                pass
    try:
        with open(photo_path, "wb") as f:
            shutil.copyfileobj(file.file, f)
    finally:
        # Release the spooled temp file backing the upload promptly.
        file.file.close()
    # NOTE(review): the router prefix is /api/crm/products but this URL omits
    # /api — confirm the frontend compensates before changing it.
    photo_url = f"/crm/products/{product_id}/photo"
    return service.update_product(product_id, ProductUpdate(photo_url=photo_url))
|
||||
|
||||
|
||||
@router.get("/{product_id}/photo")
def get_product_photo(
    product_id: str,
):
    """Serve a product photo from disk, whichever extension was uploaded."""
    for ext in ("jpg", "png", "webp"):
        candidate = os.path.join(PHOTO_DIR, f"{product_id}.{ext}")
        if os.path.exists(candidate):
            return FileResponse(candidate)
    raise HTTPException(status_code=404, detail="No photo found for this product.")
|
||||
619
backend/crm/service.py
Normal file
619
backend/crm/service.py
Normal file
@@ -0,0 +1,619 @@
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import HTTPException
|
||||
from shared.firebase import get_db
|
||||
from shared.exceptions import NotFoundError
|
||||
import re as _re
|
||||
from mqtt import database as mqtt_db
|
||||
from crm.models import (
|
||||
ProductCreate, ProductUpdate, ProductInDB,
|
||||
CustomerCreate, CustomerUpdate, CustomerInDB,
|
||||
OrderCreate, OrderUpdate, OrderInDB,
|
||||
CommCreate, CommUpdate, CommInDB,
|
||||
MediaCreate, MediaInDB,
|
||||
)
|
||||
|
||||
COLLECTION = "crm_products"
|
||||
|
||||
|
||||
def _doc_to_product(doc) -> ProductInDB:
    """Map a Firestore document snapshot onto a ProductInDB model."""
    return ProductInDB(id=doc.id, **doc.to_dict())
|
||||
|
||||
|
||||
def list_products(
    search: str | None = None,
    category: str | None = None,
    active_only: bool = False,
) -> list[ProductInDB]:
    """Return products filtered by category/active flag (Firestore-side) and by
    a case-insensitive substring search over name/SKU/description (in-process)."""
    query = get_db().collection(COLLECTION)
    if active_only:
        query = query.where("active", "==", True)
    if category:
        query = query.where("category", "==", category)

    needle = search.lower() if search else None

    def matches(product: ProductInDB) -> bool:
        if needle is None:
            return True
        haystacks = (product.name or "", product.sku or "", product.description or "")
        return any(needle in text.lower() for text in haystacks)

    return [p for p in map(_doc_to_product, query.stream()) if matches(p)]
|
||||
|
||||
|
||||
def get_product(product_id: str) -> ProductInDB:
    """Fetch one product or raise NotFoundError."""
    snapshot = get_db().collection(COLLECTION).document(product_id).get()
    if not snapshot.exists:
        raise NotFoundError("Product")
    return _doc_to_product(snapshot)
|
||||
|
||||
|
||||
def create_product(data: ProductCreate) -> ProductInDB:
    """Persist a new product and return it with its generated id and timestamps."""
    db = get_db()
    timestamp = datetime.utcnow().isoformat()
    product_id = str(uuid.uuid4())

    doc_data = data.model_dump()
    doc_data["created_at"] = timestamp
    doc_data["updated_at"] = timestamp

    # Defensive serialization of nested values that may have survived
    # model_dump() as enum/model instances.
    category = doc_data.get("category")
    if category and hasattr(category, "value"):
        doc_data["category"] = category.value
    elif category and not hasattr(category, "value"):
        doc_data["category"] = category
    for nested in ("costs", "stock"):
        value = doc_data.get(nested)
        if value and hasattr(value, "model_dump"):
            doc_data[nested] = value.model_dump()

    db.collection(COLLECTION).document(product_id).set(doc_data)
    return ProductInDB(id=product_id, **doc_data)
|
||||
|
||||
|
||||
def update_product(product_id: str, data: ProductUpdate) -> ProductInDB:
    """Apply the non-None fields from *data* to an existing product.

    Raises NotFoundError when the product does not exist.
    """
    doc_ref = get_db().collection(COLLECTION).document(product_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Product")

    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()

    # Defensive: normalize enum/model values that survived model_dump().
    if hasattr(changes.get("category"), "value"):
        changes["category"] = changes["category"].value
    for nested in ("costs", "stock"):
        if nested in changes and hasattr(changes[nested], "model_dump"):
            changes[nested] = changes[nested].model_dump()

    doc_ref.update(changes)
    return _doc_to_product(doc_ref.get())
|
||||
|
||||
|
||||
def delete_product(product_id: str) -> None:
    """Remove a product; raise NotFoundError when it does not exist."""
    doc_ref = get_db().collection(COLLECTION).document(product_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Product")
    doc_ref.delete()
|
||||
|
||||
|
||||
# ── Customers ────────────────────────────────────────────────────────────────
|
||||
|
||||
CUSTOMERS_COLLECTION = "crm_customers"
|
||||
|
||||
|
||||
def _doc_to_customer(doc) -> CustomerInDB:
    """Map a Firestore document snapshot onto a CustomerInDB model."""
    return CustomerInDB(id=doc.id, **doc.to_dict())
|
||||
|
||||
|
||||
def list_customers(
    search: str | None = None,
    tag: str | None = None,
) -> list[CustomerInDB]:
    """Return customers filtered by tag (Firestore-side) and by a case-insensitive
    substring search across name, surname, organization, contact values,
    location city/country/region, and tags (performed in-process)."""
    query = get_db().collection(CUSTOMERS_COLLECTION)
    if tag:
        query = query.where("tags", "array_contains", tag)

    needle = search.lower() if search else None

    def matches(cust: CustomerInDB) -> bool:
        if needle is None:
            return True
        if any(needle in (text or "").lower()
               for text in (cust.name, cust.surname, cust.organization)):
            return True
        if any(needle in (contact.value or "").lower()
               for contact in (cust.contacts or [])):
            return True
        loc = cust.location or {}
        if any(needle in (loc.get(key, "") or "").lower()
               for key in ("city", "country", "region")):
            return True
        return any(needle in (t or "").lower() for t in (cust.tags or []))

    return [c for c in map(_doc_to_customer, query.stream()) if matches(c)]
|
||||
|
||||
|
||||
def get_customer(customer_id: str) -> CustomerInDB:
    """Fetch one customer or raise NotFoundError."""
    snapshot = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id).get()
    if not snapshot.exists:
        raise NotFoundError("Customer")
    return _doc_to_customer(snapshot)
|
||||
|
||||
|
||||
def get_customer_nc_path(customer: CustomerInDB) -> str:
    """Nextcloud folder slug for a customer; legacy records fall back to the UUID."""
    return customer.folder_id or customer.id
|
||||
|
||||
|
||||
def create_customer(data: CustomerCreate) -> CustomerInDB:
    """Create a customer after validating and reserving its unique folder_id.

    folder_id rules: lowercase letters, digits and hyphens; must not start or
    end with a hyphen. Fixed: the previous pattern
    (``^[a-z0-9][a-z0-9\\-]*[a-z0-9]$``) required at least two characters and
    therefore rejected valid single-character ids such as "a" or "7".

    Raises:
        HTTPException 422: missing or malformed folder_id.
        HTTPException 409: folder_id already taken by another customer.
    """
    db = get_db()

    # Validate folder_id
    if not data.folder_id or not data.folder_id.strip():
        raise HTTPException(status_code=422, detail="Internal Folder ID is required.")
    folder_id = data.folder_id.strip().lower()
    # Single character is allowed; edges must be alphanumeric.
    if not _re.match(r'^[a-z0-9](?:[a-z0-9\-]*[a-z0-9])?$', folder_id):
        raise HTTPException(
            status_code=422,
            detail="Internal Folder ID must contain only lowercase letters, numbers, and hyphens, and cannot start or end with a hyphen.",
        )
    # Uniqueness check. NOTE(review): Firestore has no unique constraint, so a
    # concurrent create could still race past this — confirm acceptable.
    existing = list(db.collection(CUSTOMERS_COLLECTION).where("folder_id", "==", folder_id).limit(1).stream())
    if existing:
        raise HTTPException(status_code=409, detail=f"A customer with folder ID '{folder_id}' already exists.")

    now = datetime.utcnow().isoformat()
    customer_id = str(uuid.uuid4())

    doc_data = data.model_dump()
    doc_data["folder_id"] = folder_id  # store the normalized (stripped, lowercased) form
    doc_data["created_at"] = now
    doc_data["updated_at"] = now

    db.collection(CUSTOMERS_COLLECTION).document(customer_id).set(doc_data)
    return CustomerInDB(id=customer_id, **doc_data)
|
||||
|
||||
|
||||
def update_customer(customer_id: str, data: CustomerUpdate) -> CustomerInDB:
    """Apply the non-None fields from *data* to an existing customer.

    Raises NotFoundError when the customer does not exist.
    """
    doc_ref = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Customer")

    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()

    doc_ref.update(changes)
    return _doc_to_customer(doc_ref.get())
|
||||
|
||||
|
||||
def delete_customer(customer_id: str) -> None:
    """Remove a customer; raise NotFoundError when it does not exist."""
    doc_ref = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Customer")
    doc_ref.delete()
|
||||
|
||||
|
||||
# ── Orders ───────────────────────────────────────────────────────────────────
|
||||
|
||||
ORDERS_COLLECTION = "crm_orders"
|
||||
|
||||
|
||||
def _doc_to_order(doc) -> OrderInDB:
    """Build an OrderInDB model from a Firestore document snapshot."""
    return OrderInDB(id=doc.id, **doc.to_dict())
def _generate_order_number(db) -> str:
    """Return the next sequential order number, e.g. ``ORD-2025-007``.

    Scans every order document and takes the highest numeric suffix seen
    for the current year, so numbering restarts at 001 each January.
    NOTE(review): this is a full-collection scan with no locking — two
    concurrent creations could mint the same number; confirm acceptable.
    """
    prefix = f"ORD-{datetime.utcnow().year}-"
    highest = 0
    for snapshot in db.collection(ORDERS_COLLECTION).stream():
        number = snapshot.to_dict().get("order_number", "")
        if not (number and number.startswith(prefix)):
            continue
        try:
            suffix = int(number[len(prefix):])
        except ValueError:
            # Malformed suffix (e.g. manual edit) — ignore for numbering.
            continue
        highest = max(highest, suffix)
    return f"{prefix}{highest + 1:03d}"
def list_orders(
    customer_id: str | None = None,
    status: str | None = None,
    payment_status: str | None = None,
) -> list[OrderInDB]:
    """List orders, optionally filtered by customer, status and payment status."""
    query = get_db().collection(ORDERS_COLLECTION)

    for field, value in (
        ("customer_id", customer_id),
        ("status", status),
        ("payment_status", payment_status),
    ):
        if value:
            query = query.where(field, "==", value)

    return [_doc_to_order(snapshot) for snapshot in query.stream()]
def get_order(order_id: str) -> OrderInDB:
    """Fetch a single order by ID.

    Raises:
        NotFoundError: if the order does not exist.
    """
    snapshot = get_db().collection(ORDERS_COLLECTION).document(order_id).get()
    if not snapshot.exists:
        raise NotFoundError("Order")
    return _doc_to_order(snapshot)
def create_order(data: OrderCreate) -> OrderInDB:
    """Persist a new order, assigning an order number when none was supplied."""
    db = get_db()
    timestamp = datetime.utcnow().isoformat()
    new_id = str(uuid.uuid4())

    payload = data.model_dump()
    if not payload.get("order_number"):
        payload["order_number"] = _generate_order_number(db)
    payload["created_at"] = timestamp
    payload["updated_at"] = timestamp

    db.collection(ORDERS_COLLECTION).document(new_id).set(payload)
    return OrderInDB(id=new_id, **payload)
def update_order(order_id: str, data: OrderUpdate) -> OrderInDB:
    """Apply a partial update to an existing order document.

    ``None`` fields on *data* are skipped; ``updated_at`` is refreshed.

    Raises:
        NotFoundError: if no order with *order_id* exists.
    """
    doc_ref = get_db().collection(ORDERS_COLLECTION).document(order_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Order")

    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()

    doc_ref.update(changes)
    return _doc_to_order(doc_ref.get())
def delete_order(order_id: str) -> None:
    """Permanently remove an order document.

    Raises:
        NotFoundError: if the order does not exist.
    """
    ref = get_db().collection(ORDERS_COLLECTION).document(order_id)
    if not ref.get().exists:
        raise NotFoundError("Order")
    ref.delete()
# ── Comms Log (SQLite, async) ─────────────────────────────────────────────────
def _row_to_comm(row: dict) -> CommInDB:
    """Convert a ``crm_comms_log`` SQLite row into a CommInDB model.

    Decodes the JSON-encoded columns (attachments, to_addrs) and SQLite's
    integer booleans, and tolerates both historical attachment shapes —
    synced rows carry content_type/size, sent rows carry nextcloud_path —
    so Pydantic never sees missing required fields.
    """
    row = dict(row)

    allowed_keys = ("filename", "nextcloud_path", "content_type", "size")
    attachments = []
    for item in json.loads(row.get("attachments") or "[]"):
        if isinstance(item, dict) and item.get("filename"):
            attachments.append({k: v for k, v in item.items() if k in allowed_keys})
    row["attachments"] = attachments

    if row.get("to_addrs") and isinstance(row["to_addrs"], str):
        try:
            row["to_addrs"] = json.loads(row["to_addrs"])
        except Exception:
            row["to_addrs"] = []

    # SQLite has no native boolean type; flags come back as 0/1 integers.
    row["is_important"] = bool(row.get("is_important", 0))
    row["is_read"] = bool(row.get("is_read", 0))

    return CommInDB(**{k: v for k, v in row.items() if k in CommInDB.model_fields})
async def list_comms(
    customer_id: str,
    type: str | None = None,
    direction: str | None = None,
    limit: int = 100,
) -> list[CommInDB]:
    """Return the comms log for one customer, newest first.

    Two-phase lookup:
      1. Rows explicitly linked via ``customer_id`` (with optional
         ``type`` / ``direction`` filters).
      2. Fallback: unlinked email rows (``customer_id`` NULL or empty)
         whose from/to addresses match one of the customer's email
         contacts — covers historical rows created before automatic
         outbound customer linking.

    Results are de-duplicated by id, sorted by occurrence time
    (descending), and capped at *limit*.

    Args:
        customer_id: Firestore customer document id.
        type: Optional comm type filter (e.g. 'email', 'call').
        direction: Optional direction filter.
        limit: Maximum entries returned.
    """
    db = await mqtt_db.get_db()
    where = ["customer_id = ?"]
    params: list = [customer_id]
    if type:
        where.append("type = ?")
        params.append(type)
    if direction:
        where.append("direction = ?")
        params.append(direction)
    clause = " AND ".join(where)
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log WHERE {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    entries = [_row_to_comm(dict(r)) for r in rows]

    # Fallback: include unlinked email rows (customer_id NULL) if addresses match this customer.
    # This covers historical rows created before automatic outbound customer linking.
    fs = get_db()
    doc = fs.collection("crm_customers").document(customer_id).get()
    if doc.exists:
        data = doc.to_dict() or {}
        # Lower-cased set of the customer's email contact addresses.
        customer_emails = {
            (c.get("value") or "").strip().lower()
            for c in (data.get("contacts") or [])
            if c.get("type") == "email" and c.get("value")
        }
    else:
        customer_emails = set()

    # FIX: the fallback only ever yields email rows, so it must not run when
    # the caller filtered on a non-email type — previously list_comms(type='call')
    # could still include unlinked email entries.
    if customer_emails and (type is None or type == "email"):
        extra_where = [
            "type = 'email'",
            "(customer_id IS NULL OR customer_id = '')",
        ]
        extra_params: list = []
        if direction:
            extra_where.append("direction = ?")
            extra_params.append(direction)
        extra_clause = " AND ".join(extra_where)
        # Over-fetch (at least 300) so address matching has enough candidates.
        extra_rows = await db.execute_fetchall(
            f"SELECT * FROM crm_comms_log WHERE {extra_clause} "
            "ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
            extra_params + [max(limit, 300)],
        )
        for r in extra_rows:
            e = _row_to_comm(dict(r))
            from_addr = (e.from_addr or "").strip().lower()
            to_addrs = [(a or "").strip().lower() for a in (e.to_addrs or [])]
            matched = (from_addr in customer_emails) or any(a in customer_emails for a in to_addrs)
            if matched:
                entries.append(e)

    # De-duplicate by id and sort consistently (occurred_at, created_at, id).
    uniq = {e.id: e for e in entries}
    sorted_entries = sorted(
        uniq.values(),
        key=lambda e: ((e.occurred_at or e.created_at or ""), (e.created_at or ""), (e.id or "")),
        reverse=True,
    )
    return sorted_entries[:limit]
async def list_all_emails(
    direction: str | None = None,
    customers_only: bool = False,
    mail_accounts: list[str] | None = None,
    limit: int = 500,
) -> list[CommInDB]:
    """Return email comm-log rows across all customers, newest first.

    Args:
        direction: Optional direction filter.
        customers_only: When True, return only rows linked to a customer.
        mail_accounts: Optional whitelist of mail accounts to include.
        limit: Maximum rows returned.
    """
    db = await mqtt_db.get_db()
    where = ["type = 'email'"]
    params: list = []
    if direction:
        where.append("direction = ?")
        params.append(direction)
    if customers_only:
        # FIX: list_comms() treats both NULL and '' customer_id as "unlinked";
        # match that definition here so empty-string rows are excluded too.
        where.append("customer_id IS NOT NULL AND customer_id != ''")
    if mail_accounts:
        placeholders = ",".join("?" for _ in mail_accounts)
        where.append(f"mail_account IN ({placeholders})")
        params.extend(mail_accounts)
    clause = f"WHERE {' AND '.join(where)}"
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    return [_row_to_comm(dict(r)) for r in rows]
async def list_all_comms(
    type: str | None = None,
    direction: str | None = None,
    limit: int = 200,
) -> list[CommInDB]:
    """Return the newest comm-log rows across all customers, optionally filtered."""
    db = await mqtt_db.get_db()
    conditions: list[str] = []
    params: list = []
    for column, value in (("type", type), ("direction", direction)):
        if value:
            conditions.append(f"{column} = ?")
            params.append(value)
    clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    return [_row_to_comm(dict(r)) for r in rows]
async def get_comm(comm_id: str) -> CommInDB:
    """Fetch a single comm-log entry.

    Raises:
        HTTPException: 404 when no entry with *comm_id* exists.
    """
    db = await mqtt_db.get_db()
    found = await db.execute_fetchall(
        "SELECT * FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if not found:
        raise HTTPException(status_code=404, detail="Comm entry not found")
    return _row_to_comm(dict(found[0]))
async def create_comm(data: CommCreate) -> CommInDB:
    """Insert a new comm-log row and return the stored entry.

    ``occurred_at`` defaults to the insertion time when the caller did
    not supply one; attachments are serialized to a JSON array column.
    """
    db = await mqtt_db.get_db()
    created_at = datetime.utcnow().isoformat()
    comm_id = str(uuid.uuid4())
    values = (
        comm_id,
        data.customer_id,
        data.type.value,
        data.mail_account,
        data.direction.value,
        data.subject,
        data.body,
        json.dumps([a.model_dump() for a in data.attachments]),
        data.ext_message_id,
        data.logged_by,
        data.occurred_at or created_at,
        created_at,
    )
    await db.execute(
        """INSERT INTO crm_comms_log
           (id, customer_id, type, mail_account, direction, subject, body, attachments,
            ext_message_id, logged_by, occurred_at, created_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        values,
    )
    await db.commit()
    return await get_comm(comm_id)
async def update_comm(comm_id: str, data: CommUpdate) -> CommInDB:
    """Patch selected fields of a comm-log entry.

    Only fields present (non-None) on *data* are written; an empty patch
    is a no-op that simply re-reads the entry.

    Raises:
        HTTPException: 404 when no entry with *comm_id* exists.
    """
    db = await mqtt_db.get_db()
    exists = await db.execute_fetchall(
        "SELECT id FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if not exists:
        raise HTTPException(status_code=404, detail="Comm entry not found")

    patch = data.model_dump(exclude_none=True)
    if not patch:
        return await get_comm(comm_id)

    # Column names come from the CommUpdate model's declared fields, not
    # user input, so interpolating them into the SET clause is safe.
    assignments = ", ".join(f"{column} = ?" for column in patch)
    await db.execute(
        f"UPDATE crm_comms_log SET {assignments} WHERE id = ?",
        list(patch.values()) + [comm_id],
    )
    await db.commit()
    return await get_comm(comm_id)
async def delete_comm(comm_id: str) -> None:
    """Delete one comm-log entry.

    Raises:
        HTTPException: 404 when no entry with *comm_id* exists.
    """
    db = await mqtt_db.get_db()
    exists = await db.execute_fetchall(
        "SELECT id FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if not exists:
        raise HTTPException(status_code=404, detail="Comm entry not found")
    await db.execute("DELETE FROM crm_comms_log WHERE id = ?", (comm_id,))
    await db.commit()
async def delete_comms_bulk(ids: list[str]) -> int:
    """Delete multiple comm entries. Returns count deleted.

    Deletes in chunks so very large selections do not exceed SQLite's
    bound-parameter limit (SQLITE_MAX_VARIABLE_NUMBER, historically 999
    per statement) — a single ``IN (...)`` with one placeholder per id
    would fail for big batches.
    """
    if not ids:
        return 0
    db = await mqtt_db.get_db()
    chunk_size = 500  # comfortably below SQLite's default variable limit
    deleted = 0
    for start in range(0, len(ids), chunk_size):
        chunk = ids[start:start + chunk_size]
        placeholders = ",".join("?" for _ in chunk)
        cursor = await db.execute(
            f"DELETE FROM crm_comms_log WHERE id IN ({placeholders})", chunk
        )
        deleted += cursor.rowcount
    await db.commit()
    return deleted
async def set_comm_important(comm_id: str, important: bool) -> CommInDB:
    """Set the is_important flag and return the updated entry.

    An unknown id surfaces as the 404 raised by get_comm afterwards
    (the UPDATE itself silently matches zero rows).
    """
    db = await mqtt_db.get_db()
    flag = 1 if important else 0  # SQLite stores booleans as integers
    await db.execute(
        "UPDATE crm_comms_log SET is_important = ? WHERE id = ?",
        (flag, comm_id),
    )
    await db.commit()
    return await get_comm(comm_id)
async def set_comm_read(comm_id: str, read: bool) -> CommInDB:
    """Set the is_read flag and return the updated entry.

    An unknown id surfaces as the 404 raised by get_comm afterwards
    (the UPDATE itself silently matches zero rows).
    """
    db = await mqtt_db.get_db()
    flag = 1 if read else 0  # SQLite stores booleans as integers
    await db.execute(
        "UPDATE crm_comms_log SET is_read = ? WHERE id = ?",
        (flag, comm_id),
    )
    await db.commit()
    return await get_comm(comm_id)
# ── Media (SQLite, async) ─────────────────────────────────────────────────────
def _row_to_media(row: dict) -> MediaInDB:
    """Convert a ``crm_media`` SQLite row into a MediaInDB model.

    The ``tags`` column is stored as a JSON array (empty when NULL).
    """
    record = dict(row)
    record["tags"] = json.loads(record.get("tags") or "[]")
    return MediaInDB(**record)
async def list_media(
    customer_id: str | None = None,
    order_id: str | None = None,
) -> list[MediaInDB]:
    """List media entries, optionally filtered by customer and/or order, newest first."""
    db = await mqtt_db.get_db()
    conditions: list[str] = []
    params: list = []
    for column, value in (("customer_id", customer_id), ("order_id", order_id)):
        if value:
            conditions.append(f"{column} = ?")
            params.append(value)
    clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_media {clause} ORDER BY created_at DESC",
        params,
    )
    return [_row_to_media(dict(r)) for r in rows]
async def create_media(data: MediaCreate) -> MediaInDB:
    """Insert a media record and return it as stored.

    Tags are serialized to a JSON array; ``direction`` is stored as its
    enum value (or NULL when absent).
    """
    db = await mqtt_db.get_db()
    media_id = str(uuid.uuid4())
    await db.execute(
        """INSERT INTO crm_media
           (id, customer_id, order_id, filename, nextcloud_path, mime_type,
            direction, tags, uploaded_by, created_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (
            media_id,
            data.customer_id,
            data.order_id,
            data.filename,
            data.nextcloud_path,
            data.mime_type,
            data.direction.value if data.direction else None,
            json.dumps(data.tags),
            data.uploaded_by,
            datetime.utcnow().isoformat(),
        ),
    )
    await db.commit()

    stored = await db.execute_fetchall(
        "SELECT * FROM crm_media WHERE id = ?", (media_id,)
    )
    return _row_to_media(dict(stored[0]))
async def delete_media(media_id: str) -> None:
    """Delete one media entry.

    Raises:
        HTTPException: 404 when no entry with *media_id* exists.
    """
    db = await mqtt_db.get_db()
    exists = await db.execute_fetchall(
        "SELECT id FROM crm_media WHERE id = ?", (media_id,)
    )
    if not exists:
        raise HTTPException(status_code=404, detail="Media entry not found")
    await db.execute("DELETE FROM crm_media WHERE id = ?", (media_id,))
    await db.commit()