fix: Bugs created after the overhaul, performance and layout fixes
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, Query, BackgroundTasks
|
||||
from fastapi import APIRouter, Depends, Query, BackgroundTasks, Body
|
||||
from typing import Optional
|
||||
|
||||
from auth.models import TokenPayload
|
||||
@@ -14,15 +14,25 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("", response_model=CustomerListResponse)
|
||||
def list_customers(
|
||||
async def list_customers(
|
||||
search: Optional[str] = Query(None),
|
||||
tag: Optional[str] = Query(None),
|
||||
sort: Optional[str] = Query(None),
|
||||
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||
):
|
||||
customers = service.list_customers(search=search, tag=tag)
|
||||
customers = service.list_customers(search=search, tag=tag, sort=sort)
|
||||
if sort == "latest_comm":
|
||||
customers = await service.list_customers_sorted_by_latest_comm(customers)
|
||||
return CustomerListResponse(customers=customers, total=len(customers))
|
||||
|
||||
|
||||
@router.get("/tags", response_model=list[str])
|
||||
def list_tags(
|
||||
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||
):
|
||||
return service.list_all_tags()
|
||||
|
||||
|
||||
@router.get("/{customer_id}", response_model=CustomerInDB)
|
||||
def get_customer(
|
||||
customer_id: str,
|
||||
@@ -64,8 +74,57 @@ def update_customer(
|
||||
|
||||
|
||||
@router.delete("/{customer_id}", status_code=204)
|
||||
def delete_customer(
|
||||
async def delete_customer(
|
||||
customer_id: str,
|
||||
wipe_comms: bool = Query(False),
|
||||
wipe_files: bool = Query(False),
|
||||
wipe_nextcloud: bool = Query(False),
|
||||
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||
):
|
||||
customer = service.delete_customer(customer_id)
|
||||
nc_path = service.get_customer_nc_path(customer)
|
||||
|
||||
if wipe_comms or wipe_nextcloud:
|
||||
await service.delete_customer_comms(customer_id)
|
||||
|
||||
if wipe_files or wipe_nextcloud:
|
||||
await service.delete_customer_media_entries(customer_id)
|
||||
|
||||
if settings.nextcloud_url:
|
||||
folder = f"customers/{nc_path}"
|
||||
if wipe_nextcloud:
|
||||
try:
|
||||
await nextcloud.delete_file(folder)
|
||||
except Exception as e:
|
||||
logger.warning("Could not delete NC folder for customer %s: %s", customer_id, e)
|
||||
elif wipe_files:
|
||||
stale_folder = f"customers/STALE_{nc_path}"
|
||||
try:
|
||||
await nextcloud.rename_folder(folder, stale_folder)
|
||||
except Exception as e:
|
||||
logger.warning("Could not rename NC folder for customer %s: %s", customer_id, e)
|
||||
|
||||
|
||||
@router.post("/{customer_id}/toggle-negotiating", response_model=CustomerInDB)
|
||||
async def toggle_negotiating(
|
||||
customer_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||
):
|
||||
service.delete_customer(customer_id)
|
||||
return await service.toggle_negotiating(customer_id)
|
||||
|
||||
|
||||
@router.post("/{customer_id}/toggle-problem", response_model=CustomerInDB)
|
||||
async def toggle_problem(
|
||||
customer_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||
):
|
||||
return await service.toggle_problem(customer_id)
|
||||
|
||||
|
||||
@router.get("/{customer_id}/last-comm-direction")
|
||||
async def get_last_comm_direction(
|
||||
customer_id: str,
|
||||
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||
):
|
||||
direction = await service.get_last_comm_direction(customer_id)
|
||||
return {"direction": direction}
|
||||
|
||||
@@ -23,7 +23,7 @@ from email import encoders
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from config import settings
|
||||
from mqtt import database as mqtt_db
|
||||
import database as mqtt_db
|
||||
from crm.mail_accounts import get_mail_accounts, account_by_key, account_by_email
|
||||
|
||||
logger = logging.getLogger("crm.email_sync")
|
||||
|
||||
@@ -35,6 +35,10 @@ class ProductCreate(BaseModel):
|
||||
sku: Optional[str] = None
|
||||
category: ProductCategory
|
||||
description: Optional[str] = None
|
||||
name_en: Optional[str] = None
|
||||
name_gr: Optional[str] = None
|
||||
description_en: Optional[str] = None
|
||||
description_gr: Optional[str] = None
|
||||
price: float
|
||||
currency: str = "EUR"
|
||||
costs: Optional[ProductCosts] = None
|
||||
@@ -49,6 +53,10 @@ class ProductUpdate(BaseModel):
|
||||
sku: Optional[str] = None
|
||||
category: Optional[ProductCategory] = None
|
||||
description: Optional[str] = None
|
||||
name_en: Optional[str] = None
|
||||
name_gr: Optional[str] = None
|
||||
description_en: Optional[str] = None
|
||||
description_gr: Optional[str] = None
|
||||
price: Optional[float] = None
|
||||
currency: Optional[str] = None
|
||||
costs: Optional[ProductCosts] = None
|
||||
@@ -114,9 +122,11 @@ class OwnedItem(BaseModel):
|
||||
|
||||
|
||||
class CustomerLocation(BaseModel):
    """Postal/geographic location of a customer; every field is optional."""

    address: Optional[str] = None
    city: Optional[str] = None
    postal_code: Optional[str] = None
    region: Optional[str] = None
    # Bug fix: `country` was declared twice (before postal_code and again at
    # the end). The duplicate is removed; a single declaration is kept.
    country: Optional[str] = None
|
||||
|
||||
|
||||
class CustomerCreate(BaseModel):
|
||||
@@ -124,6 +134,7 @@ class CustomerCreate(BaseModel):
|
||||
name: str
|
||||
surname: Optional[str] = None
|
||||
organization: Optional[str] = None
|
||||
religion: Optional[str] = None
|
||||
contacts: List[CustomerContact] = []
|
||||
notes: List[CustomerNote] = []
|
||||
location: Optional[CustomerLocation] = None
|
||||
@@ -133,6 +144,8 @@ class CustomerCreate(BaseModel):
|
||||
linked_user_ids: List[str] = []
|
||||
nextcloud_folder: Optional[str] = None
|
||||
folder_id: Optional[str] = None # Human-readable Nextcloud folder name, e.g. "saint-john-corfu"
|
||||
negotiating: bool = False
|
||||
has_problem: bool = False
|
||||
|
||||
|
||||
class CustomerUpdate(BaseModel):
|
||||
@@ -140,6 +153,7 @@ class CustomerUpdate(BaseModel):
|
||||
name: Optional[str] = None
|
||||
surname: Optional[str] = None
|
||||
organization: Optional[str] = None
|
||||
religion: Optional[str] = None
|
||||
contacts: Optional[List[CustomerContact]] = None
|
||||
notes: Optional[List[CustomerNote]] = None
|
||||
location: Optional[CustomerLocation] = None
|
||||
@@ -148,6 +162,8 @@ class CustomerUpdate(BaseModel):
|
||||
owned_items: Optional[List[OwnedItem]] = None
|
||||
linked_user_ids: Optional[List[str]] = None
|
||||
nextcloud_folder: Optional[str] = None
|
||||
negotiating: Optional[bool] = None
|
||||
has_problem: Optional[bool] = None
|
||||
# folder_id intentionally excluded from update — set once at creation
|
||||
|
||||
|
||||
@@ -286,8 +302,11 @@ class CommCreate(BaseModel):
|
||||
|
||||
|
||||
class CommUpdate(BaseModel):
    """Partial-update payload for a communication entry.

    Every field is optional; only the fields supplied by the caller are
    meant to be applied to the stored record.
    """

    type: Optional[CommType] = None
    direction: Optional[CommDirection] = None
    subject: Optional[str] = None
    body: Optional[str] = None
    logged_by: Optional[str] = None
    occurred_at: Optional[str] = None  # presumably an ISO timestamp string — confirm with callers
||||
|
||||
|
||||
@@ -333,6 +352,7 @@ class MediaCreate(BaseModel):
|
||||
direction: Optional[MediaDirection] = None
|
||||
tags: List[str] = []
|
||||
uploaded_by: Optional[str] = None
|
||||
thumbnail_path: Optional[str] = None
|
||||
|
||||
|
||||
class MediaInDB(BaseModel):
|
||||
@@ -346,6 +366,7 @@ class MediaInDB(BaseModel):
|
||||
tags: List[str] = []
|
||||
uploaded_by: Optional[str] = None
|
||||
created_at: str
|
||||
thumbnail_path: Optional[str] = None
|
||||
|
||||
|
||||
class MediaListResponse(BaseModel):
|
||||
|
||||
@@ -312,3 +312,18 @@ async def delete_file(relative_path: str) -> None:
|
||||
resp = await client.request("DELETE", url, auth=_auth())
|
||||
if resp.status_code not in (200, 204, 404):
|
||||
raise HTTPException(status_code=502, detail=f"Nextcloud delete failed: {resp.status_code}")
|
||||
|
||||
|
||||
async def rename_folder(old_relative_path: str, new_relative_path: str) -> None:
    """Rename/move a folder in Nextcloud using WebDAV MOVE."""
    source_url = _full_url(old_relative_path)
    move_headers = {
        "Destination": _full_url(new_relative_path),
        "Overwrite": "F",  # refuse to clobber an existing target
    }
    client = _get_client()
    resp = await client.request("MOVE", source_url, auth=_auth(), headers=move_headers)
    if resp.status_code not in (201, 204):
        raise HTTPException(status_code=502, detail=f"Nextcloud rename failed: {resp.status_code}")
|
||||
|
||||
@@ -10,6 +10,7 @@ Folder convention (all paths relative to nextcloud_base_path = BellSystems/Conso
|
||||
folder_id = customer.folder_id if set, else customer.id (legacy fallback).
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, Query, UploadFile, File, Form, Response, HTTPException, Request
|
||||
from fastapi.responses import StreamingResponse
|
||||
from typing import Optional
|
||||
|
||||
from jose import JWTError
|
||||
@@ -17,7 +18,9 @@ from auth.models import TokenPayload
|
||||
from auth.dependencies import require_permission
|
||||
from auth.utils import decode_access_token
|
||||
from crm import nextcloud, service
|
||||
from config import settings
|
||||
from crm.models import MediaCreate, MediaDirection
|
||||
from crm.thumbnails import generate_thumbnail
|
||||
|
||||
router = APIRouter(prefix="/api/crm/nextcloud", tags=["crm-nextcloud"])
|
||||
|
||||
@@ -30,6 +33,29 @@ DIRECTION_MAP = {
|
||||
}
|
||||
|
||||
|
||||
@router.get("/web-url")
|
||||
async def get_web_url(
|
||||
path: str = Query(..., description="Path relative to nextcloud_base_path"),
|
||||
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||
):
|
||||
"""
|
||||
Return the Nextcloud Files web-UI URL for a given file path.
|
||||
Opens the parent folder with the file highlighted.
|
||||
"""
|
||||
if not settings.nextcloud_url:
|
||||
raise HTTPException(status_code=503, detail="Nextcloud not configured")
|
||||
base = settings.nextcloud_base_path.strip("/")
|
||||
# path is relative to base, e.g. "customers/abc/media/photo.jpg"
|
||||
parts = path.rsplit("/", 1)
|
||||
folder_rel = parts[0] if len(parts) == 2 else ""
|
||||
filename = parts[-1]
|
||||
nc_dir = f"/{base}/{folder_rel}" if folder_rel else f"/{base}"
|
||||
from urllib.parse import urlencode, quote
|
||||
qs = urlencode({"dir": nc_dir, "scrollto": filename})
|
||||
url = f"{settings.nextcloud_url.rstrip('/')}/index.php/apps/files/?{qs}"
|
||||
return {"url": url}
|
||||
|
||||
|
||||
@router.get("/browse")
|
||||
async def browse(
|
||||
path: str = Query(..., description="Path relative to nextcloud_base_path"),
|
||||
@@ -56,6 +82,14 @@ async def browse_all(
|
||||
|
||||
all_files = await nextcloud.list_folder_recursive(base)
|
||||
|
||||
# Exclude _info.txt stubs — human-readable only, should never appear in the UI.
|
||||
# .thumbs/ files are kept: the frontend needs them to build the thumbnail map
|
||||
# (it already filters them out of the visible file list itself).
|
||||
all_files = [
|
||||
f for f in all_files
|
||||
if not f["path"].endswith("/_info.txt")
|
||||
]
|
||||
|
||||
# Tag each file with the top-level subfolder it lives under
|
||||
for item in all_files:
|
||||
parts = item["path"].split("/")
|
||||
@@ -84,33 +118,54 @@ async def proxy_file(
|
||||
except (JWTError, KeyError):
|
||||
raise HTTPException(status_code=403, detail="Invalid token")
|
||||
|
||||
content, mime_type = await nextcloud.download_file(path)
|
||||
total = len(content)
|
||||
|
||||
# Forward the Range header to Nextcloud so we get a true partial response
|
||||
# without buffering the whole file into memory.
|
||||
nc_url = nextcloud._full_url(path)
|
||||
nc_auth = nextcloud._auth()
|
||||
forward_headers = {}
|
||||
range_header = request.headers.get("range")
|
||||
if range_header and range_header.startswith("bytes="):
|
||||
# Parse "bytes=start-end"
|
||||
try:
|
||||
range_spec = range_header[6:]
|
||||
start_str, _, end_str = range_spec.partition("-")
|
||||
start = int(start_str) if start_str else 0
|
||||
end = int(end_str) if end_str else total - 1
|
||||
end = min(end, total - 1)
|
||||
chunk = content[start:end + 1]
|
||||
headers = {
|
||||
"Content-Range": f"bytes {start}-{end}/{total}",
|
||||
"Accept-Ranges": "bytes",
|
||||
"Content-Length": str(len(chunk)),
|
||||
"Content-Type": mime_type,
|
||||
}
|
||||
return Response(content=chunk, status_code=206, headers=headers, media_type=mime_type)
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
if range_header:
|
||||
forward_headers["Range"] = range_header
|
||||
|
||||
return Response(
|
||||
content=content,
|
||||
import httpx as _httpx
|
||||
|
||||
# Use a dedicated streaming client — httpx.stream() keeps the connection open
|
||||
# for the lifetime of the generator, so we can't reuse the shared persistent client.
|
||||
# We enter the stream context here to get headers immediately (no body buffering),
|
||||
# then hand the body iterator to StreamingResponse.
|
||||
stream_client = _httpx.AsyncClient(timeout=None, follow_redirects=True)
|
||||
nc_resp_ctx = stream_client.stream("GET", nc_url, auth=nc_auth, headers=forward_headers)
|
||||
nc_resp = await nc_resp_ctx.__aenter__()
|
||||
|
||||
if nc_resp.status_code == 404:
|
||||
await nc_resp_ctx.__aexit__(None, None, None)
|
||||
await stream_client.aclose()
|
||||
raise HTTPException(status_code=404, detail="File not found in Nextcloud")
|
||||
if nc_resp.status_code not in (200, 206):
|
||||
await nc_resp_ctx.__aexit__(None, None, None)
|
||||
await stream_client.aclose()
|
||||
raise HTTPException(status_code=502, detail=f"Nextcloud returned {nc_resp.status_code}")
|
||||
|
||||
mime_type = nc_resp.headers.get("content-type", "application/octet-stream").split(";")[0].strip()
|
||||
|
||||
resp_headers = {"Accept-Ranges": "bytes"}
|
||||
for h in ("content-range", "content-length"):
|
||||
if h in nc_resp.headers:
|
||||
resp_headers[h.title()] = nc_resp.headers[h]
|
||||
|
||||
async def _stream():
|
||||
try:
|
||||
async for chunk in nc_resp.aiter_bytes(chunk_size=64 * 1024):
|
||||
yield chunk
|
||||
finally:
|
||||
await nc_resp_ctx.__aexit__(None, None, None)
|
||||
await stream_client.aclose()
|
||||
|
||||
return StreamingResponse(
|
||||
_stream(),
|
||||
status_code=nc_resp.status_code,
|
||||
media_type=mime_type,
|
||||
headers={"Accept-Ranges": "bytes", "Content-Length": str(total)},
|
||||
headers=resp_headers,
|
||||
)
|
||||
|
||||
|
||||
@@ -164,6 +219,24 @@ async def upload_file(
|
||||
mime_type = file.content_type or "application/octet-stream"
|
||||
await nextcloud.upload_file(file_path, content, mime_type)
|
||||
|
||||
# Generate and upload thumbnail (best-effort, non-blocking)
|
||||
# Always stored as {stem}.jpg regardless of source extension so the thumb
|
||||
# filename is unambiguous and the existence check can never false-positive.
|
||||
thumb_path = None
|
||||
try:
|
||||
thumb_bytes = generate_thumbnail(content, mime_type, file.filename)
|
||||
if thumb_bytes:
|
||||
thumb_folder = f"{target_folder}/.thumbs"
|
||||
stem = file.filename.rsplit(".", 1)[0] if "." in file.filename else file.filename
|
||||
thumb_filename = f"{stem}.jpg"
|
||||
thumb_nc_path = f"{thumb_folder}/{thumb_filename}"
|
||||
await nextcloud.ensure_folder(thumb_folder)
|
||||
await nextcloud.upload_file(thumb_nc_path, thumb_bytes, "image/jpeg")
|
||||
thumb_path = thumb_nc_path
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.getLogger(__name__).warning("Thumbnail generation failed for %s: %s", file.filename, e)
|
||||
|
||||
# Resolve direction
|
||||
resolved_direction = None
|
||||
if direction:
|
||||
@@ -184,6 +257,7 @@ async def upload_file(
|
||||
direction=resolved_direction,
|
||||
tags=tag_list,
|
||||
uploaded_by=_user.name,
|
||||
thumbnail_path=thumb_path,
|
||||
))
|
||||
|
||||
return media_record
|
||||
@@ -244,6 +318,11 @@ async def sync_nextcloud_files(
|
||||
|
||||
# Collect all NC files recursively (handles nested folders at any depth)
|
||||
all_nc_files = await nextcloud.list_folder_recursive(base)
|
||||
# Skip .thumbs/ folder contents and the _info.txt stub — these are internal
|
||||
all_nc_files = [
|
||||
f for f in all_nc_files
|
||||
if "/.thumbs/" not in f["path"] and not f["path"].endswith("/_info.txt")
|
||||
]
|
||||
for item in all_nc_files:
|
||||
parts = item["path"].split("/")
|
||||
item["_subfolder"] = parts[2] if len(parts) > 2 else "media"
|
||||
@@ -274,6 +353,105 @@ async def sync_nextcloud_files(
|
||||
return {"synced": synced, "skipped": skipped}
|
||||
|
||||
|
||||
@router.post("/generate-thumbs")
|
||||
async def generate_thumbs(
|
||||
customer_id: str = Form(...),
|
||||
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||
):
|
||||
"""
|
||||
Scan all customer files in Nextcloud and generate thumbnails for any file
|
||||
that doesn't already have one in the corresponding .thumbs/ sub-folder.
|
||||
Skips files inside .thumbs/ itself and file types that can't be thumbnailed.
|
||||
Returns counts of generated, skipped (already exists), and failed files.
|
||||
"""
|
||||
customer = service.get_customer(customer_id)
|
||||
nc_path = service.get_customer_nc_path(customer)
|
||||
base = f"customers/{nc_path}"
|
||||
|
||||
all_nc_files = await nextcloud.list_folder_recursive(base)
|
||||
|
||||
# Build a set of existing thumb paths for O(1) lookup
|
||||
existing_thumbs = {
|
||||
f["path"] for f in all_nc_files if "/.thumbs/" in f["path"]
|
||||
}
|
||||
|
||||
# Only process real files (not thumbs themselves)
|
||||
candidates = [f for f in all_nc_files if "/.thumbs/" not in f["path"]]
|
||||
|
||||
generated = 0
|
||||
skipped = 0
|
||||
failed = 0
|
||||
|
||||
for f in candidates:
|
||||
# Derive where the thumb would live
|
||||
path = f["path"] # e.g. customers/{nc_path}/{subfolder}/photo.jpg
|
||||
parts = path.rsplit("/", 1)
|
||||
if len(parts) != 2:
|
||||
skipped += 1
|
||||
continue
|
||||
parent_folder, filename = parts
|
||||
stem = filename.rsplit(".", 1)[0] if "." in filename else filename
|
||||
thumb_filename = f"{stem}.jpg"
|
||||
thumb_nc_path = f"{parent_folder}/.thumbs/{thumb_filename}"
|
||||
|
||||
if thumb_nc_path in existing_thumbs:
|
||||
skipped += 1
|
||||
continue
|
||||
|
||||
# Download the file, generate thumb, upload
|
||||
try:
|
||||
content, mime_type = await nextcloud.download_file(path)
|
||||
thumb_bytes = generate_thumbnail(content, mime_type, filename)
|
||||
if not thumb_bytes:
|
||||
skipped += 1 # unsupported file type
|
||||
continue
|
||||
thumb_folder = f"{parent_folder}/.thumbs"
|
||||
await nextcloud.ensure_folder(thumb_folder)
|
||||
await nextcloud.upload_file(thumb_nc_path, thumb_bytes, "image/jpeg")
|
||||
generated += 1
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.getLogger(__name__).warning("Thumb gen failed for %s: %s", path, e)
|
||||
failed += 1
|
||||
|
||||
return {"generated": generated, "skipped": skipped, "failed": failed}
|
||||
|
||||
|
||||
@router.post("/clear-thumbs")
|
||||
async def clear_thumbs(
|
||||
customer_id: str = Form(...),
|
||||
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||
):
|
||||
"""
|
||||
Delete all .thumbs sub-folders for a customer across all subfolders.
|
||||
This lets you regenerate thumbnails from scratch.
|
||||
Returns count of .thumbs folders deleted.
|
||||
"""
|
||||
customer = service.get_customer(customer_id)
|
||||
nc_path = service.get_customer_nc_path(customer)
|
||||
base = f"customers/{nc_path}"
|
||||
|
||||
all_nc_files = await nextcloud.list_folder_recursive(base)
|
||||
|
||||
# Collect unique .thumbs folder paths
|
||||
thumb_folders = set()
|
||||
for f in all_nc_files:
|
||||
if "/.thumbs/" in f["path"]:
|
||||
folder = f["path"].split("/.thumbs/")[0] + "/.thumbs"
|
||||
thumb_folders.add(folder)
|
||||
|
||||
deleted = 0
|
||||
for folder in thumb_folders:
|
||||
try:
|
||||
await nextcloud.delete_file(folder)
|
||||
deleted += 1
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.getLogger(__name__).warning("Failed to delete .thumbs folder %s: %s", folder, e)
|
||||
|
||||
return {"deleted_folders": deleted}
|
||||
|
||||
|
||||
@router.post("/untrack-deleted")
|
||||
async def untrack_deleted_files(
|
||||
customer_id: str = Form(...),
|
||||
@@ -287,15 +465,22 @@ async def untrack_deleted_files(
|
||||
nc_path = service.get_customer_nc_path(customer)
|
||||
base = f"customers/{nc_path}"
|
||||
|
||||
# Collect all NC file paths recursively
|
||||
# Collect all NC file paths recursively (excluding thumbs and info stub)
|
||||
all_nc_files = await nextcloud.list_folder_recursive(base)
|
||||
nc_paths = {item["path"] for item in all_nc_files}
|
||||
nc_paths = {
|
||||
item["path"] for item in all_nc_files
|
||||
if "/.thumbs/" not in item["path"] and not item["path"].endswith("/_info.txt")
|
||||
}
|
||||
|
||||
# Find DB records whose NC path no longer exists
|
||||
# Find DB records whose NC path no longer exists, OR that are internal files
|
||||
# (_info.txt / .thumbs/) which should never have been tracked in the first place.
|
||||
existing = await service.list_media(customer_id=customer_id)
|
||||
untracked = 0
|
||||
for m in existing:
|
||||
if m.nextcloud_path and m.nextcloud_path not in nc_paths:
|
||||
is_internal = m.nextcloud_path and (
|
||||
"/.thumbs/" in m.nextcloud_path or m.nextcloud_path.endswith("/_info.txt")
|
||||
)
|
||||
if m.nextcloud_path and (is_internal or m.nextcloud_path not in nc_paths):
|
||||
try:
|
||||
await service.delete_media(m.id)
|
||||
untracked += 1
|
||||
|
||||
@@ -13,6 +13,8 @@ class QuotationStatus(str, Enum):
|
||||
class QuotationItemCreate(BaseModel):
|
||||
product_id: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
description_en: Optional[str] = None
|
||||
description_gr: Optional[str] = None
|
||||
unit_type: str = "pcs" # pcs / kg / m
|
||||
unit_cost: float = 0.0
|
||||
discount_percent: float = 0.0
|
||||
@@ -52,6 +54,10 @@ class QuotationCreate(BaseModel):
|
||||
client_location: Optional[str] = None
|
||||
client_phone: Optional[str] = None
|
||||
client_email: Optional[str] = None
|
||||
# Legacy quotation fields
|
||||
is_legacy: bool = False
|
||||
legacy_date: Optional[str] = None # ISO date string, manually set
|
||||
legacy_pdf_path: Optional[str] = None # Nextcloud path to uploaded PDF
|
||||
|
||||
|
||||
class QuotationUpdate(BaseModel):
|
||||
@@ -79,6 +85,10 @@ class QuotationUpdate(BaseModel):
|
||||
client_location: Optional[str] = None
|
||||
client_phone: Optional[str] = None
|
||||
client_email: Optional[str] = None
|
||||
# Legacy quotation fields
|
||||
is_legacy: Optional[bool] = None
|
||||
legacy_date: Optional[str] = None
|
||||
legacy_pdf_path: Optional[str] = None
|
||||
|
||||
|
||||
class QuotationInDB(BaseModel):
|
||||
@@ -118,6 +128,10 @@ class QuotationInDB(BaseModel):
|
||||
client_location: Optional[str] = None
|
||||
client_phone: Optional[str] = None
|
||||
client_email: Optional[str] = None
|
||||
# Legacy quotation fields
|
||||
is_legacy: bool = False
|
||||
legacy_date: Optional[str] = None
|
||||
legacy_pdf_path: Optional[str] = None
|
||||
|
||||
|
||||
class QuotationListItem(BaseModel):
|
||||
@@ -130,6 +144,9 @@ class QuotationListItem(BaseModel):
|
||||
created_at: str
|
||||
updated_at: str
|
||||
nextcloud_pdf_url: Optional[str] = None
|
||||
is_legacy: bool = False
|
||||
legacy_date: Optional[str] = None
|
||||
legacy_pdf_path: Optional[str] = None
|
||||
|
||||
|
||||
class QuotationListResponse(BaseModel):
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from fastapi import APIRouter, Depends, Query, UploadFile, File
|
||||
from fastapi.responses import StreamingResponse
|
||||
from typing import Optional
|
||||
import io
|
||||
@@ -28,6 +28,14 @@ async def get_next_number(
|
||||
return NextNumberResponse(next_number=next_num)
|
||||
|
||||
|
||||
@router.get("/all", response_model=list[dict])
|
||||
async def list_all_quotations(
|
||||
_user: TokenPayload = Depends(require_permission("crm", "view")),
|
||||
):
|
||||
"""Returns all quotations across all customers, each including customer_name."""
|
||||
return await svc.list_all_quotations()
|
||||
|
||||
|
||||
@router.get("/customer/{customer_id}", response_model=QuotationListResponse)
|
||||
async def list_quotations_for_customer(
|
||||
customer_id: str,
|
||||
@@ -99,3 +107,15 @@ async def regenerate_pdf(
|
||||
):
|
||||
"""Force PDF regeneration and re-upload to Nextcloud."""
|
||||
return await svc.regenerate_pdf(quotation_id)
|
||||
|
||||
|
||||
@router.post("/{quotation_id}/legacy-pdf", response_model=QuotationInDB)
|
||||
async def upload_legacy_pdf(
|
||||
quotation_id: str,
|
||||
file: UploadFile = File(...),
|
||||
_user: TokenPayload = Depends(require_permission("crm", "edit")),
|
||||
):
|
||||
"""Upload a PDF file for a legacy quotation and store its Nextcloud path."""
|
||||
pdf_bytes = await file.read()
|
||||
filename = file.filename or f"legacy-{quotation_id}.pdf"
|
||||
return await svc.upload_legacy_pdf(quotation_id, pdf_bytes, filename)
|
||||
|
||||
@@ -19,7 +19,7 @@ from crm.quotation_models import (
|
||||
QuotationUpdate,
|
||||
)
|
||||
from crm.service import get_customer
|
||||
from mqtt import database as mqtt_db
|
||||
import database as mqtt_db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -153,10 +153,42 @@ async def get_next_number() -> str:
|
||||
return await _generate_quotation_number(db)
|
||||
|
||||
|
||||
async def list_all_quotations() -> list[dict]:
    """Return all quotations across all customers, with customer_name injected."""
    from shared.firebase import get_db as get_firestore
    db = await mqtt_db.get_db()
    rows = await db.execute_fetchall(
        "SELECT id, quotation_number, title, customer_id, status, final_total, created_at, updated_at, "
        "nextcloud_pdf_url, is_legacy, legacy_date, legacy_pdf_path "
        "FROM crm_quotations ORDER BY created_at DESC",
        (),
    )
    items = [dict(r) for r in rows]
    # Fetch unique customer names from Firestore in one pass
    customer_ids = {i["customer_id"] for i in items if i.get("customer_id")}
    customer_names: dict[str, str] = {}
    if customer_ids:
        fstore = get_firestore()
        for cid in customer_ids:
            try:
                doc = fstore.collection("crm_customers").document(cid).get()
                if doc.exists:
                    d = doc.to_dict()
                    # Display label: "name surname organization", skipping blanks.
                    parts = [d.get("name", ""), d.get("surname", ""), d.get("organization", "")]
                    label = " ".join(p for p in parts if p).strip()
                    customer_names[cid] = label or cid
            except Exception:
                # Best-effort: fall back to the raw id rather than failing the listing.
                customer_names[cid] = cid
    for item in items:
        # Missing/unresolved customers get an empty name, not an error.
        item["customer_name"] = customer_names.get(item["customer_id"], "")
    return items
|
||||
|
||||
|
||||
async def list_quotations(customer_id: str) -> list[QuotationListItem]:
|
||||
db = await mqtt_db.get_db()
|
||||
rows = await db.execute_fetchall(
|
||||
"SELECT id, quotation_number, title, customer_id, status, final_total, created_at, updated_at, nextcloud_pdf_url "
|
||||
"SELECT id, quotation_number, title, customer_id, status, final_total, created_at, updated_at, "
|
||||
"nextcloud_pdf_url, is_legacy, legacy_date, legacy_pdf_path "
|
||||
"FROM crm_quotations WHERE customer_id = ? ORDER BY created_at DESC",
|
||||
(customer_id,),
|
||||
)
|
||||
@@ -210,6 +242,7 @@ async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) ->
|
||||
subtotal_before_discount, global_discount_amount, new_subtotal, vat_amount, final_total,
|
||||
nextcloud_pdf_path, nextcloud_pdf_url,
|
||||
client_org, client_name, client_location, client_phone, client_email,
|
||||
is_legacy, legacy_date, legacy_pdf_path,
|
||||
created_at, updated_at
|
||||
) VALUES (
|
||||
?, ?, ?, ?, ?,
|
||||
@@ -220,6 +253,7 @@ async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) ->
|
||||
?, ?, ?, ?, ?,
|
||||
NULL, NULL,
|
||||
?, ?, ?, ?, ?,
|
||||
?, ?, ?,
|
||||
?, ?
|
||||
)""",
|
||||
(
|
||||
@@ -231,6 +265,7 @@ async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) ->
|
||||
totals["subtotal_before_discount"], totals["global_discount_amount"],
|
||||
totals["new_subtotal"], totals["vat_amount"], totals["final_total"],
|
||||
data.client_org, data.client_name, data.client_location, data.client_phone, data.client_email,
|
||||
1 if data.is_legacy else 0, data.legacy_date, data.legacy_pdf_path,
|
||||
now, now,
|
||||
),
|
||||
)
|
||||
@@ -240,11 +275,12 @@ async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) ->
|
||||
item_id = str(uuid.uuid4())
|
||||
await db.execute(
|
||||
"""INSERT INTO crm_quotation_items
|
||||
(id, quotation_id, product_id, description, unit_type, unit_cost,
|
||||
discount_percent, quantity, vat_percent, line_total, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(id, quotation_id, product_id, description, description_en, description_gr,
|
||||
unit_type, unit_cost, discount_percent, quantity, vat_percent, line_total, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
item_id, qid, item.get("product_id"), item.get("description"),
|
||||
item.get("description_en"), item.get("description_gr"),
|
||||
item.get("unit_type", "pcs"), item.get("unit_cost", 0),
|
||||
item.get("discount_percent", 0), item.get("quantity", 1),
|
||||
item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
|
||||
@@ -255,7 +291,7 @@ async def create_quotation(data: QuotationCreate, generate_pdf: bool = False) ->
|
||||
|
||||
quotation = await get_quotation(qid)
|
||||
|
||||
if generate_pdf:
|
||||
if generate_pdf and not data.is_legacy:
|
||||
quotation = await _do_generate_and_upload_pdf(quotation)
|
||||
|
||||
return quotation
|
||||
@@ -285,6 +321,7 @@ async def update_quotation(quotation_id: str, data: QuotationUpdate, generate_pd
|
||||
"shipping_cost", "shipping_cost_discount", "install_cost",
|
||||
"install_cost_discount", "extras_label", "extras_cost",
|
||||
"client_org", "client_name", "client_location", "client_phone", "client_email",
|
||||
"legacy_date", "legacy_pdf_path",
|
||||
]
|
||||
|
||||
for field in scalar_fields:
|
||||
@@ -343,11 +380,12 @@ async def update_quotation(quotation_id: str, data: QuotationUpdate, generate_pd
|
||||
item_id = str(uuid.uuid4())
|
||||
await db.execute(
|
||||
"""INSERT INTO crm_quotation_items
|
||||
(id, quotation_id, product_id, description, unit_type, unit_cost,
|
||||
discount_percent, quantity, vat_percent, line_total, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(id, quotation_id, product_id, description, description_en, description_gr,
|
||||
unit_type, unit_cost, discount_percent, quantity, vat_percent, line_total, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
item_id, quotation_id, item.get("product_id"), item.get("description"),
|
||||
item.get("description_en"), item.get("description_gr"),
|
||||
item.get("unit_type", "pcs"), item.get("unit_cost", 0),
|
||||
item.get("discount_percent", 0), item.get("quantity", 1),
|
||||
item.get("vat_percent", 24), item["line_total"], item.get("sort_order", i),
|
||||
@@ -488,7 +526,33 @@ async def get_quotation_pdf_bytes(quotation_id: str) -> bytes:
|
||||
"""Download the PDF for a quotation from Nextcloud and return raw bytes."""
|
||||
from fastapi import HTTPException
|
||||
quotation = await get_quotation(quotation_id)
|
||||
if not quotation.nextcloud_pdf_path:
|
||||
raise HTTPException(status_code=404, detail="No PDF generated for this quotation")
|
||||
pdf_bytes, _ = await nextcloud.download_file(quotation.nextcloud_pdf_path)
|
||||
# For legacy quotations, the PDF is at legacy_pdf_path
|
||||
path = quotation.legacy_pdf_path if quotation.is_legacy else quotation.nextcloud_pdf_path
|
||||
if not path:
|
||||
raise HTTPException(status_code=404, detail="No PDF available for this quotation")
|
||||
pdf_bytes, _ = await nextcloud.download_file(path)
|
||||
return pdf_bytes
|
||||
|
||||
|
||||
async def upload_legacy_pdf(quotation_id: str, pdf_bytes: bytes, filename: str) -> QuotationInDB:
    """Upload a legacy PDF to Nextcloud and store its path in the quotation record.

    Args:
        quotation_id: ID of an existing legacy quotation.
        pdf_bytes: Raw PDF content to upload.
        filename: Name to store the file under inside the customer's
            quotations folder.

    Returns:
        The refreshed quotation record, including the new ``legacy_pdf_path``.

    Raises:
        HTTPException: 400 if the quotation is not flagged as legacy.
    """
    quotation = await get_quotation(quotation_id)
    if not quotation.is_legacy:
        raise HTTPException(status_code=400, detail="This quotation is not a legacy quotation")

    # Resolve the customer's Nextcloud folder (synchronous Firestore lookups).
    from crm.service import get_customer, get_customer_nc_path
    customer = get_customer(quotation.customer_id)
    nc_folder = get_customer_nc_path(customer)

    await nextcloud.ensure_folder(f"customers/{nc_folder}/quotations")
    # BUG FIX: the upload path previously ended in a literal placeholder
    # instead of the caller-supplied filename, so the `filename` parameter
    # was ignored and every legacy PDF collided on the same path.
    rel_path = f"customers/{nc_folder}/quotations/{filename}"
    await nextcloud.upload_file(rel_path, pdf_bytes, "application/pdf")

    db = await mqtt_db.get_db()
    now = datetime.utcnow().isoformat()
    await db.execute(
        "UPDATE crm_quotations SET legacy_pdf_path = ?, updated_at = ? WHERE id = ?",
        (rel_path, now, quotation_id),
    )
    await db.commit()
    return await get_quotation(quotation_id)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import asyncio
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
@@ -6,7 +7,7 @@ from fastapi import HTTPException
|
||||
from shared.firebase import get_db
|
||||
from shared.exceptions import NotFoundError
|
||||
import re as _re
|
||||
from mqtt import database as mqtt_db
|
||||
import database as mqtt_db
|
||||
from crm.models import (
|
||||
ProductCreate, ProductUpdate, ProductInDB,
|
||||
CustomerCreate, CustomerUpdate, CustomerInDB,
|
||||
@@ -20,6 +21,11 @@ COLLECTION = "crm_products"
|
||||
|
||||
def _doc_to_product(doc) -> ProductInDB:
    """Convert a Firestore document snapshot into a ``ProductInDB`` model.

    Products created before the bilingual-name feature carry only ``name``;
    mirror it into ``name_en`` / ``name_gr`` when those are empty or absent
    so the model is always fully populated.
    """
    data = doc.to_dict()
    legacy_name = data.get("name")
    if legacy_name:
        for key in ("name_en", "name_gr"):
            if not data.get(key):
                data[key] = legacy_name
    return ProductInDB(id=doc.id, **data)
|
||||
|
||||
|
||||
@@ -128,6 +134,7 @@ def _doc_to_customer(doc) -> CustomerInDB:
|
||||
def list_customers(
|
||||
search: str | None = None,
|
||||
tag: str | None = None,
|
||||
sort: str | None = None,
|
||||
) -> list[CustomerInDB]:
|
||||
db = get_db()
|
||||
query = db.collection(CUSTOMERS_COLLECTION)
|
||||
@@ -141,28 +148,64 @@ def list_customers(
|
||||
|
||||
if search:
|
||||
s = search.lower()
|
||||
s_nospace = s.replace(" ", "")
|
||||
name_match = s in (customer.name or "").lower()
|
||||
surname_match = s in (customer.surname or "").lower()
|
||||
org_match = s in (customer.organization or "").lower()
|
||||
religion_match = s in (customer.religion or "").lower()
|
||||
language_match = s in (customer.language or "").lower()
|
||||
contact_match = any(
|
||||
s in (c.value or "").lower()
|
||||
s_nospace in (c.value or "").lower().replace(" ", "")
|
||||
or s in (c.value or "").lower()
|
||||
for c in (customer.contacts or [])
|
||||
)
|
||||
loc = customer.location or {}
|
||||
loc_match = (
|
||||
s in (loc.get("city", "") or "").lower() or
|
||||
s in (loc.get("country", "") or "").lower() or
|
||||
s in (loc.get("region", "") or "").lower()
|
||||
loc = customer.location
|
||||
loc_match = bool(loc) and (
|
||||
s in (loc.address or "").lower() or
|
||||
s in (loc.city or "").lower() or
|
||||
s in (loc.postal_code or "").lower() or
|
||||
s in (loc.region or "").lower() or
|
||||
s in (loc.country or "").lower()
|
||||
)
|
||||
tag_match = any(s in (t or "").lower() for t in (customer.tags or []))
|
||||
if not (name_match or surname_match or org_match or contact_match or loc_match or tag_match):
|
||||
if not (name_match or surname_match or org_match or religion_match or language_match or contact_match or loc_match or tag_match):
|
||||
continue
|
||||
|
||||
results.append(customer)
|
||||
|
||||
# Sorting (non-latest_comm; latest_comm is handled by the async router wrapper)
|
||||
_TITLES = {"fr.", "rev.", "archim.", "bp.", "abp.", "met.", "mr.", "mrs.", "ms.", "dr.", "prof."}
|
||||
|
||||
def _sort_name(c):
|
||||
return (c.name or "").lower()
|
||||
|
||||
def _sort_surname(c):
|
||||
return (c.surname or "").lower()
|
||||
|
||||
def _sort_default(c):
|
||||
return c.created_at or ""
|
||||
|
||||
if sort == "name":
|
||||
results.sort(key=_sort_name)
|
||||
elif sort == "surname":
|
||||
results.sort(key=_sort_surname)
|
||||
elif sort == "default":
|
||||
results.sort(key=_sort_default)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def list_all_tags() -> list[str]:
    """Return every distinct customer tag, sorted alphabetically."""
    db = get_db()
    # Fetch only the `tags` field to keep the Firestore read light.
    docs = db.collection(CUSTOMERS_COLLECTION).select(["tags"]).stream()
    unique = {
        tag
        for doc in docs
        for tag in (doc.to_dict().get("tags") or [])
        if tag
    }
    return sorted(unique)
|
||||
|
||||
|
||||
def get_customer(customer_id: str) -> CustomerInDB:
|
||||
db = get_db()
|
||||
doc = db.collection(CUSTOMERS_COLLECTION).document(customer_id).get()
|
||||
@@ -206,6 +249,7 @@ def create_customer(data: CustomerCreate) -> CustomerInDB:
|
||||
|
||||
|
||||
def update_customer(customer_id: str, data: CustomerUpdate) -> CustomerInDB:
|
||||
from google.cloud.firestore_v1 import DELETE_FIELD
|
||||
db = get_db()
|
||||
doc_ref = db.collection(CUSTOMERS_COLLECTION).document(customer_id)
|
||||
doc = doc_ref.get()
|
||||
@@ -215,18 +259,110 @@ def update_customer(customer_id: str, data: CustomerUpdate) -> CustomerInDB:
|
||||
update_data = data.model_dump(exclude_none=True)
|
||||
update_data["updated_at"] = datetime.utcnow().isoformat()
|
||||
|
||||
# Fields that should be explicitly deleted from Firestore when set to None
|
||||
# (exclude_none=True would just skip them, leaving the old value intact)
|
||||
NULLABLE_FIELDS = {"title", "surname", "organization", "religion"}
|
||||
set_fields = data.model_fields_set
|
||||
for field in NULLABLE_FIELDS:
|
||||
if field in set_fields and getattr(data, field) is None:
|
||||
update_data[field] = DELETE_FIELD
|
||||
|
||||
doc_ref.update(update_data)
|
||||
updated_doc = doc_ref.get()
|
||||
return _doc_to_customer(updated_doc)
|
||||
|
||||
|
||||
def delete_customer(customer_id: str) -> None:
|
||||
async def toggle_negotiating(customer_id: str) -> CustomerInDB:
    """Flip the customer's ``negotiating`` flag and return the updated record.

    Raises:
        NotFoundError: if no customer document exists for ``customer_id``.
    """
    firestore = get_db()
    ref = firestore.collection(CUSTOMERS_COLLECTION).document(customer_id)
    snapshot = ref.get()
    if not snapshot.exists:
        raise NotFoundError("Customer")
    # Missing flag is treated as False, so the first toggle turns it on.
    flag = snapshot.to_dict().get("negotiating", False)
    ref.update({
        "negotiating": not flag,
        "updated_at": datetime.utcnow().isoformat(),
    })
    return _doc_to_customer(ref.get())
|
||||
|
||||
|
||||
async def toggle_problem(customer_id: str) -> CustomerInDB:
    """Flip the customer's ``has_problem`` flag and return the updated record.

    Raises:
        NotFoundError: if no customer document exists for ``customer_id``.
    """
    firestore = get_db()
    ref = firestore.collection(CUSTOMERS_COLLECTION).document(customer_id)
    snapshot = ref.get()
    if not snapshot.exists:
        raise NotFoundError("Customer")
    # Missing flag is treated as False, so the first toggle turns it on.
    flag = snapshot.to_dict().get("has_problem", False)
    ref.update({
        "has_problem": not flag,
        "updated_at": datetime.utcnow().isoformat(),
    })
    return _doc_to_customer(ref.get())
|
||||
|
||||
|
||||
async def get_last_comm_direction(customer_id: str) -> str | None:
    """Return 'inbound' or 'outbound' of the most recent comm for this customer, or None.

    Orders by ``occurred_at`` falling back to ``created_at``, so manually
    backdated entries sort by their stated time.
    """
    sql = (
        "SELECT direction FROM crm_comms_log WHERE customer_id = ? "
        "AND direction IN ('inbound', 'outbound') "
        "ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT 1"
    )
    db = await mqtt_db.get_db()
    matches = await db.execute_fetchall(sql, (customer_id,))
    if not matches:
        return None
    return matches[0][0]
|
||||
|
||||
|
||||
async def get_last_comm_timestamp(customer_id: str) -> str | None:
    """Return the ISO timestamp of the most recent comm for this customer, or None.

    Uses ``occurred_at`` when present, otherwise ``created_at``; returns
    None when the customer has no comm entries at all.
    """
    sql = (
        "SELECT COALESCE(occurred_at, created_at) as ts FROM crm_comms_log "
        "WHERE customer_id = ? ORDER BY ts DESC LIMIT 1"
    )
    db = await mqtt_db.get_db()
    matches = await db.execute_fetchall(sql, (customer_id,))
    if not matches:
        return None
    return matches[0][0]
|
||||
|
||||
|
||||
async def list_customers_sorted_by_latest_comm(customers: list[CustomerInDB]) -> list[CustomerInDB]:
    """Re-sort a list of customers so those with the most recent comm come first.

    Latest-comm timestamps are fetched concurrently for all customers;
    customers with no comms (timestamp None) sort last. The sort is stable,
    so ties keep their incoming relative order.
    """
    latest = await asyncio.gather(
        *(get_last_comm_timestamp(c.id) for c in customers)
    )
    ordered = sorted(
        zip(customers, latest),
        key=lambda pair: pair[1] or "",
        reverse=True,
    )
    return [customer for customer, _ in ordered]
|
||||
|
||||
|
||||
def delete_customer(customer_id: str) -> CustomerInDB:
    """Delete customer from Firestore. Returns the customer data (for NC path lookup).

    Raises:
        NotFoundError: if no customer document exists for ``customer_id``.
    """
    ref = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id)
    snapshot = ref.get()
    if not snapshot.exists:
        raise NotFoundError("Customer")
    # Capture the record before deleting so the caller can still resolve
    # the customer's Nextcloud folder for follow-up cleanup.
    removed = _doc_to_customer(snapshot)
    ref.delete()
    return removed
|
||||
|
||||
|
||||
async def delete_customer_comms(customer_id: str) -> int:
    """Delete all comm log entries for a customer. Returns count deleted."""
    db = await mqtt_db.get_db()
    result = await db.execute(
        "DELETE FROM crm_comms_log WHERE customer_id = ?", (customer_id,)
    )
    await db.commit()
    # rowcount reflects how many rows the DELETE removed.
    return result.rowcount
|
||||
|
||||
|
||||
async def delete_customer_media_entries(customer_id: str) -> int:
    """Delete all media DB entries for a customer. Returns count deleted."""
    db = await mqtt_db.get_db()
    result = await db.execute(
        "DELETE FROM crm_media WHERE customer_id = ?", (customer_id,)
    )
    await db.commit()
    # rowcount reflects how many rows the DELETE removed.
    return result.rowcount
|
||||
|
||||
|
||||
# ── Orders ───────────────────────────────────────────────────────────────────
|
||||
@@ -594,11 +730,11 @@ async def create_media(data: MediaCreate) -> MediaInDB:
|
||||
await db.execute(
|
||||
"""INSERT INTO crm_media
|
||||
(id, customer_id, order_id, filename, nextcloud_path, mime_type,
|
||||
direction, tags, uploaded_by, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
direction, tags, uploaded_by, thumbnail_path, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(media_id, data.customer_id, data.order_id, data.filename,
|
||||
data.nextcloud_path, data.mime_type, direction,
|
||||
tags_json, data.uploaded_by, now),
|
||||
tags_json, data.uploaded_by, data.thumbnail_path, now),
|
||||
)
|
||||
await db.commit()
|
||||
|
||||
|
||||
125
backend/crm/thumbnails.py
Normal file
125
backend/crm/thumbnails.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""
|
||||
Thumbnail generation for uploaded media files.
|
||||
|
||||
Supports:
|
||||
- Images (via Pillow): JPEG thumbnail at 300×300 max
|
||||
- Videos (via ffmpeg subprocess): extract first frame as JPEG
|
||||
- PDFs (via pdf2image + Poppler): render first page as JPEG
|
||||
|
||||
Returns None if the type is unsupported or if generation fails.
|
||||
"""
|
||||
import io
|
||||
import logging
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
THUMB_SIZE = (220, 220) # small enough for gallery tiles; keeps files ~4-6 KB
|
||||
|
||||
|
||||
def _thumb_from_image(content: bytes) -> bytes | None:
    """Downscale an image to a small progressive JPEG thumbnail.

    Returns JPEG bytes, or None when Pillow is unavailable or the bytes
    cannot be decoded as an image.
    """
    try:
        from PIL import Image, ImageOps

        image = Image.open(io.BytesIO(content))
        # Apply the EXIF Orientation tag before resizing so phone photos
        # don't come out rotated in the gallery.
        image = ImageOps.exif_transpose(image)
        image = image.convert("RGB")
        image.thumbnail(THUMB_SIZE, Image.LANCZOS)
        buf = io.BytesIO()
        # quality=65 + optimize + progressive keeps typical thumbnails small.
        image.save(buf, format="JPEG", quality=65, optimize=True, progressive=True)
        return buf.getvalue()
    except Exception as exc:
        logger.warning("Image thumbnail failed: %s", exc)
        return None
|
||||
|
||||
|
||||
def _thumb_from_video(content: bytes) -> bytes | None:
    """
    Extract the first frame of a video as a JPEG thumbnail.

    We write the video to a temp file instead of piping it to ffmpeg because
    most video containers (MP4, MOV, MKV …) store their index (moov atom) at
    an arbitrary offset and ffmpeg cannot seek on a pipe — causing rc≠0 with
    "moov atom not found" or similar errors when stdin is used.

    Returns JPEG bytes, or None if ffmpeg is missing or extraction fails.
    """
    import tempfile
    import os
    try:
        # Write to a temp file so ffmpeg can seek freely
        # (delete=False: the path must outlive this `with` block so ffmpeg
        # can open it; we unlink it ourselves in the `finally` below).
        with tempfile.NamedTemporaryFile(suffix=".video", delete=False) as tmp_in:
            tmp_in.write(content)
            tmp_in_path = tmp_in.name

        # Pre-create the output file just to reserve a unique path for ffmpeg.
        with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tmp_out:
            tmp_out_path = tmp_out.name

        try:
            result = subprocess.run(
                [
                    "ffmpeg", "-y",
                    "-i", tmp_in_path,
                    "-vframes", "1",
                    # Width fixed to the thumb size; -2 keeps aspect ratio
                    # while rounding height to an even value (codec requirement).
                    "-vf", f"scale={THUMB_SIZE[0]}:-2",
                    "-q:v", "4",  # JPEG quality 1-31 (lower = better); 4 ≈ ~80% quality
                    tmp_out_path,
                ],
                capture_output=True,
                timeout=60,
            )
            # Success requires both a zero exit code and a non-empty output file.
            if result.returncode == 0 and os.path.getsize(tmp_out_path) > 0:
                with open(tmp_out_path, "rb") as f:
                    return f.read()
            logger.warning(
                "ffmpeg video thumb failed (rc=%s): %s",
                result.returncode,
                # Last 400 bytes of stderr are usually the actual error message.
                result.stderr[-400:].decode(errors="replace") if result.stderr else "",
            )
            return None
        finally:
            # Always remove both temp files; the output may already be gone
            # if ffmpeg failed before creating it.
            os.unlink(tmp_in_path)
            try:
                os.unlink(tmp_out_path)
            except FileNotFoundError:
                pass
    except FileNotFoundError:
        # Raised by subprocess.run when the ffmpeg binary is not on PATH.
        logger.warning("ffmpeg not found — video thumbnails unavailable")
        return None
    except Exception as e:
        logger.warning("Video thumbnail failed: %s", e)
        return None
|
||||
|
||||
|
||||
def _thumb_from_pdf(content: bytes) -> bytes | None:
    """Render the first page of a PDF as a small JPEG thumbnail.

    Returns JPEG bytes, or None when pdf2image/Poppler is unavailable or
    rendering fails.
    """
    try:
        from pdf2image import convert_from_bytes

        # Only the first page is needed; `size` caps the render resolution.
        pages = convert_from_bytes(content, first_page=1, last_page=1, size=THUMB_SIZE)
        if not pages:
            return None
        first_page = pages[0]
        buf = io.BytesIO()
        first_page.save(buf, format="JPEG", quality=55, optimize=True, progressive=True)
        return buf.getvalue()
    except ImportError:
        logger.warning("pdf2image not installed — PDF thumbnails unavailable")
        return None
    except Exception as exc:
        logger.warning("PDF thumbnail failed: %s", exc)
        return None
|
||||
|
||||
|
||||
def generate_thumbnail(content: bytes, mime_type: str, filename: str) -> bytes | None:
|
||||
"""
|
||||
Generate a small JPEG thumbnail for the given file content.
|
||||
Returns JPEG bytes or None if unsupported / generation fails.
|
||||
"""
|
||||
mt = (mime_type or "").lower()
|
||||
fn = (filename or "").lower()
|
||||
|
||||
if mt.startswith("image/"):
|
||||
return _thumb_from_image(content)
|
||||
if mt.startswith("video/"):
|
||||
return _thumb_from_video(content)
|
||||
if mt == "application/pdf" or fn.endswith(".pdf"):
|
||||
return _thumb_from_pdf(content)
|
||||
|
||||
return None
|
||||
Reference in New Issue
Block a user