"""CRM service layer.

Products, customers and orders are stored in Firestore (via shared.firebase);
the communications log and media index are stored in SQLite (async, via
mqtt.database).
"""
import json
|
|
import uuid
|
|
from datetime import datetime
|
|
|
|
from fastapi import HTTPException
|
|
from shared.firebase import get_db
|
|
from shared.exceptions import NotFoundError
|
|
import re as _re
|
|
from mqtt import database as mqtt_db
|
|
from crm.models import (
|
|
ProductCreate, ProductUpdate, ProductInDB,
|
|
CustomerCreate, CustomerUpdate, CustomerInDB,
|
|
OrderCreate, OrderUpdate, OrderInDB,
|
|
CommCreate, CommUpdate, CommInDB,
|
|
MediaCreate, MediaInDB,
|
|
)
|
|
|
|
# Firestore collection holding product documents.
COLLECTION = "crm_products"
|
|
|
|
|
|
def _doc_to_product(doc) -> ProductInDB:
    """Hydrate a Firestore document snapshot into a ProductInDB model."""
    return ProductInDB(id=doc.id, **doc.to_dict())
|
|
|
|
|
|
def list_products(
    search: str | None = None,
    category: str | None = None,
    active_only: bool = False,
) -> list[ProductInDB]:
    """List products with optional filtering.

    Args:
        search: Case-insensitive substring matched client-side against
            name, SKU, and description (Firestore has no substring operator).
        category: Exact server-side equality filter on ``category``.
        active_only: When True, only documents with ``active == True``.

    Returns:
        Matching products in Firestore stream order.
    """
    db = get_db()
    query = db.collection(COLLECTION)

    if active_only:
        query = query.where("active", "==", True)
    if category:
        query = query.where("category", "==", category)

    # Fix: hoist the lowercasing out of the loop — it is loop-invariant and
    # was previously recomputed for every streamed document.
    needle = search.lower() if search else None

    results: list[ProductInDB] = []
    for doc in query.stream():
        product = _doc_to_product(doc)
        if needle is not None and not any(
            needle in (field or "").lower()
            for field in (product.name, product.sku, product.description)
        ):
            continue
        results.append(product)

    return results
|
|
|
|
|
|
def get_product(product_id: str) -> ProductInDB:
    """Fetch a single product by document id.

    Raises:
        NotFoundError: no product document with that id exists.
    """
    snapshot = get_db().collection(COLLECTION).document(product_id).get()
    if not snapshot.exists:
        raise NotFoundError("Product")
    return _doc_to_product(snapshot)
|
|
|
|
|
|
def create_product(data: ProductCreate) -> ProductInDB:
    """Persist a new product and return it with generated id and timestamps."""
    db = get_db()
    timestamp = datetime.utcnow().isoformat()
    new_id = str(uuid.uuid4())

    payload = data.model_dump()
    payload["created_at"] = timestamp
    payload["updated_at"] = timestamp

    # Firestore cannot store enum members or pydantic models directly,
    # so flatten them to plain values / dicts before writing.
    category = payload.get("category")
    if category:
        payload["category"] = getattr(category, "value", category)
    for key in ("costs", "stock"):
        nested = payload.get(key)
        if nested and hasattr(nested, "model_dump"):
            payload[key] = nested.model_dump()

    db.collection(COLLECTION).document(new_id).set(payload)
    return ProductInDB(id=new_id, **payload)
|
|
|
|
|
|
def update_product(product_id: str, data: ProductUpdate) -> ProductInDB:
    """Apply a partial update to a product and return the refreshed document.

    Raises:
        NotFoundError: no product document with that id exists.

    NOTE(review): ``exclude_none=True`` means a field can never be reset to
    None through this function — confirm that is intended.
    """
    db = get_db()
    doc_ref = db.collection(COLLECTION).document(product_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Product")

    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()

    # Flatten enum/model values so Firestore receives plain types.
    if "category" in changes and hasattr(changes["category"], "value"):
        changes["category"] = changes["category"].value
    for key in ("costs", "stock"):
        if key in changes and hasattr(changes[key], "model_dump"):
            changes[key] = changes[key].model_dump()

    doc_ref.update(changes)
    return _doc_to_product(doc_ref.get())
|
|
|
|
|
|
def delete_product(product_id: str) -> None:
    """Delete a product document.

    Raises:
        NotFoundError: no product document with that id exists.
    """
    doc_ref = get_db().collection(COLLECTION).document(product_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Product")
    doc_ref.delete()
|
|
|
|
|
|
# ── Customers ────────────────────────────────────────────────────────────────
|
|
|
|
# Firestore collection holding customer documents.
CUSTOMERS_COLLECTION = "crm_customers"
|
|
|
|
|
|
def _doc_to_customer(doc) -> CustomerInDB:
    """Hydrate a Firestore document snapshot into a CustomerInDB model."""
    return CustomerInDB(id=doc.id, **doc.to_dict())
|
|
|
|
|
|
def list_customers(
    search: str | None = None,
    tag: str | None = None,
) -> list[CustomerInDB]:
    """List customers with optional filtering.

    Args:
        search: Case-insensitive substring matched client-side against name,
            surname, organization, contact values, location city/country/region,
            and tags.
        tag: Server-side ``array_contains`` filter on the ``tags`` field.

    Returns:
        Matching customers in Firestore stream order.
    """
    db = get_db()
    query = db.collection(CUSTOMERS_COLLECTION)
    if tag:
        query = query.where("tags", "array_contains", tag)

    # Fix: hoist the lowercasing out of the loop — it is loop-invariant and
    # was previously recomputed for every streamed document.
    needle = search.lower() if search else None

    def _matches(customer: CustomerInDB) -> bool:
        """True when any searchable field contains the lowercase needle."""
        loc = customer.location or {}
        haystacks = [
            customer.name,
            customer.surname,
            customer.organization,
            loc.get("city", ""),
            loc.get("country", ""),
            loc.get("region", ""),
            *[c.value for c in (customer.contacts or [])],
            *(customer.tags or []),
        ]
        return any(needle in (h or "").lower() for h in haystacks)

    results: list[CustomerInDB] = []
    for doc in query.stream():
        customer = _doc_to_customer(doc)
        if needle is not None and not _matches(customer):
            continue
        results.append(customer)

    return results
|
|
|
|
|
|
def get_customer(customer_id: str) -> CustomerInDB:
    """Fetch a single customer by document id.

    Raises:
        NotFoundError: no customer document with that id exists.
    """
    snapshot = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id).get()
    if not snapshot.exists:
        raise NotFoundError("Customer")
    return _doc_to_customer(snapshot)
|
|
|
|
|
|
def get_customer_nc_path(customer: CustomerInDB) -> str:
    """Return the Nextcloud folder slug for a customer.

    Legacy records predate ``folder_id``; for those the customer UUID
    doubles as the folder name.
    """
    return customer.folder_id or customer.id
|
|
|
|
|
|
def create_customer(data: CustomerCreate) -> CustomerInDB:
    """Create a customer after validating its Nextcloud folder slug.

    The slug is trimmed and lowercased before validation and storage.

    Raises:
        HTTPException 422: ``folder_id`` missing or not slug-shaped.
        HTTPException 409: another customer already owns the slug.
    """
    db = get_db()

    # Validate folder_id: required, slug-shaped, unique.
    if not data.folder_id or not data.folder_id.strip():
        raise HTTPException(status_code=422, detail="Internal Folder ID is required.")
    folder_id = data.folder_id.strip().lower()
    # Bug fix: the previous pattern required at least two characters, so a
    # legal single-character slug like "a" was rejected.  fullmatch also
    # avoids re.match's "`$` accepts a trailing newline" quirk.
    if not _re.fullmatch(r'[a-z0-9](?:[a-z0-9\-]*[a-z0-9])?', folder_id):
        raise HTTPException(
            status_code=422,
            detail="Internal Folder ID must contain only lowercase letters, numbers, and hyphens, and cannot start or end with a hyphen.",
        )
    # Check uniqueness (limit(1): we only need to know whether one exists).
    existing = list(db.collection(CUSTOMERS_COLLECTION).where("folder_id", "==", folder_id).limit(1).stream())
    if existing:
        raise HTTPException(status_code=409, detail=f"A customer with folder ID '{folder_id}' already exists.")

    now = datetime.utcnow().isoformat()
    customer_id = str(uuid.uuid4())

    doc_data = data.model_dump()
    doc_data["folder_id"] = folder_id  # store the normalised slug, not the raw input
    doc_data["created_at"] = now
    doc_data["updated_at"] = now

    db.collection(CUSTOMERS_COLLECTION).document(customer_id).set(doc_data)
    return CustomerInDB(id=customer_id, **doc_data)
|
|
|
|
|
|
def update_customer(customer_id: str, data: CustomerUpdate) -> CustomerInDB:
    """Apply a partial update to a customer and return the refreshed document.

    Raises:
        NotFoundError: no customer document with that id exists.
    """
    db = get_db()
    doc_ref = db.collection(CUSTOMERS_COLLECTION).document(customer_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Customer")

    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()

    doc_ref.update(changes)
    return _doc_to_customer(doc_ref.get())
|
|
|
|
|
|
def delete_customer(customer_id: str) -> None:
    """Delete a customer document.

    Raises:
        NotFoundError: no customer document with that id exists.
    """
    doc_ref = get_db().collection(CUSTOMERS_COLLECTION).document(customer_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Customer")
    doc_ref.delete()
|
|
|
|
|
|
# ── Orders ───────────────────────────────────────────────────────────────────
|
|
|
|
# Firestore collection holding order documents.
ORDERS_COLLECTION = "crm_orders"
|
|
|
|
|
|
def _doc_to_order(doc) -> OrderInDB:
    """Hydrate a Firestore document snapshot into an OrderInDB model."""
    return OrderInDB(id=doc.id, **doc.to_dict())
|
|
|
|
|
|
def _generate_order_number(db) -> str:
    """Generate the next sequential order number for the current year.

    Scans every order document, takes the highest ``ORD-<year>-NNN`` suffix
    seen, and returns that maximum plus one, zero-padded to three digits.

    NOTE(review): not transactional — two concurrent creates could mint the
    same number; also O(total orders) per call.  Confirm collection size
    stays small or move this into a Firestore counter/transaction.
    """
    prefix = f"ORD-{datetime.utcnow().year}-"
    highest = 0
    for doc in db.collection(ORDERS_COLLECTION).stream():
        number = doc.to_dict().get("order_number", "")
        if number and number.startswith(prefix):
            try:
                highest = max(highest, int(number[len(prefix):]))
            except ValueError:
                # Malformed suffixes (e.g. hand-entered numbers) are ignored.
                pass
    return f"{prefix}{highest + 1:03d}"
|
|
|
|
|
|
def list_orders(
    customer_id: str | None = None,
    status: str | None = None,
    payment_status: str | None = None,
) -> list[OrderInDB]:
    """List orders, applying each provided value as a server-side equality filter."""
    query = get_db().collection(ORDERS_COLLECTION)
    for field, value in (
        ("customer_id", customer_id),
        ("status", status),
        ("payment_status", payment_status),
    ):
        if value:
            query = query.where(field, "==", value)
    return [_doc_to_order(doc) for doc in query.stream()]
|
|
|
|
|
|
def get_order(order_id: str) -> OrderInDB:
    """Fetch a single order by document id.

    Raises:
        NotFoundError: no order document with that id exists.
    """
    snapshot = get_db().collection(ORDERS_COLLECTION).document(order_id).get()
    if not snapshot.exists:
        raise NotFoundError("Order")
    return _doc_to_order(snapshot)
|
|
|
|
|
|
def create_order(data: OrderCreate) -> OrderInDB:
    """Persist a new order, auto-assigning an order number when the caller omits one."""
    db = get_db()
    stamp = datetime.utcnow().isoformat()
    order_id = str(uuid.uuid4())

    payload = data.model_dump()
    if not payload.get("order_number"):
        payload["order_number"] = _generate_order_number(db)
    payload["created_at"] = stamp
    payload["updated_at"] = stamp

    db.collection(ORDERS_COLLECTION).document(order_id).set(payload)
    return OrderInDB(id=order_id, **payload)
|
|
|
|
|
|
def update_order(order_id: str, data: OrderUpdate) -> OrderInDB:
    """Apply a partial update to an order and return the refreshed document.

    Raises:
        NotFoundError: no order document with that id exists.
    """
    db = get_db()
    doc_ref = db.collection(ORDERS_COLLECTION).document(order_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Order")

    changes = data.model_dump(exclude_none=True)
    changes["updated_at"] = datetime.utcnow().isoformat()

    doc_ref.update(changes)
    return _doc_to_order(doc_ref.get())
|
|
|
|
|
|
def delete_order(order_id: str) -> None:
    """Delete an order document.

    Raises:
        NotFoundError: no order document with that id exists.
    """
    doc_ref = get_db().collection(ORDERS_COLLECTION).document(order_id)
    if not doc_ref.get().exists:
        raise NotFoundError("Order")
    doc_ref.delete()
|
|
|
|
|
|
# ── Comms Log (SQLite, async) ─────────────────────────────────────────────────
|
|
|
|
def _row_to_comm(row: dict) -> CommInDB:
    """Convert a SQLite comms-log row into a CommInDB model.

    Normalises JSON-encoded columns and integer booleans so pydantic never
    sees missing required fields or wrong types.
    """
    row = dict(row)

    # Attachments are stored as a JSON array.  Keep only the keys the model
    # knows about, so synced rows (content_type/size) and sent rows
    # (nextcloud_path) both validate; entries without a filename are dropped.
    allowed = ("filename", "nextcloud_path", "content_type", "size")
    attachments = []
    for item in json.loads(row.get("attachments") or "[]"):
        if isinstance(item, dict) and item.get("filename"):
            attachments.append({k: v for k, v in item.items() if k in allowed})
    row["attachments"] = attachments

    to_addrs = row.get("to_addrs")
    if to_addrs and isinstance(to_addrs, str):
        try:
            row["to_addrs"] = json.loads(to_addrs)
        except Exception:
            # Corrupt JSON: fall back to an empty recipient list.
            row["to_addrs"] = []

    # SQLite has no boolean type; coerce the stored integers.
    row["is_important"] = bool(row.get("is_important", 0))
    row["is_read"] = bool(row.get("is_read", 0))

    return CommInDB(**{k: v for k, v in row.items() if k in CommInDB.model_fields})
|
|
|
|
|
|
async def list_comms(
    customer_id: str,
    type: str | None = None,
    direction: str | None = None,
    limit: int = 100,
) -> list[CommInDB]:
    """Return comm-log entries for a customer, newest first.

    Two sources are merged:
      1. SQLite rows explicitly linked via ``customer_id`` (filtered by
         ``type``/``direction`` when given).
      2. Unlinked email rows whose from/to addresses match one of the
         customer's email contacts (looked up in Firestore) — a fallback for
         historical rows created before automatic outbound customer linking.

    The merged set is de-duplicated by id, sorted newest-first, and truncated
    to ``limit``.

    Raises:
        Whatever the underlying SQLite/Firestore clients raise; no entries
        is not an error (returns an empty list).
    """
    db = await mqtt_db.get_db()
    # Build the WHERE clause for the directly-linked rows.
    where = ["customer_id = ?"]
    params: list = [customer_id]
    if type:
        where.append("type = ?")
        params.append(type)
    if direction:
        where.append("direction = ?")
        params.append(direction)
    clause = " AND ".join(where)
    # COALESCE prefers the business timestamp (occurred_at) over created_at.
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log WHERE {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    entries = [_row_to_comm(dict(r)) for r in rows]

    # Fallback: include unlinked email rows (customer_id NULL) if addresses match this customer.
    # This covers historical rows created before automatic outbound customer linking.
    fs = get_db()
    doc = fs.collection("crm_customers").document(customer_id).get()
    if doc.exists:
        data = doc.to_dict() or {}
        # Normalised (trimmed, lowercased) email addresses for this customer.
        customer_emails = {
            (c.get("value") or "").strip().lower()
            for c in (data.get("contacts") or [])
            if c.get("type") == "email" and c.get("value")
        }
    else:
        customer_emails = set()

    if customer_emails:
        # Only email rows that were never linked (NULL or empty customer_id).
        extra_where = [
            "type = 'email'",
            "(customer_id IS NULL OR customer_id = '')",
        ]
        extra_params: list = []
        if direction:
            extra_where.append("direction = ?")
            extra_params.append(direction)
        extra_clause = " AND ".join(extra_where)
        # Fetch more than `limit` (at least 300) because most unlinked rows
        # will not match this customer's addresses and get filtered below.
        extra_rows = await db.execute_fetchall(
            f"SELECT * FROM crm_comms_log WHERE {extra_clause} "
            "ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
            extra_params + [max(limit, 300)],
        )
        for r in extra_rows:
            e = _row_to_comm(dict(r))
            from_addr = (e.from_addr or "").strip().lower()
            to_addrs = [(a or "").strip().lower() for a in (e.to_addrs or [])]
            # A row belongs to this customer if it was sent from or to any
            # of the customer's known email addresses.
            matched = (from_addr in customer_emails) or any(a in customer_emails for a in to_addrs)
            if matched:
                entries.append(e)

    # De-duplicate by id and sort newest-first; the id tiebreaker keeps the
    # ordering deterministic when timestamps collide.
    uniq = {e.id: e for e in entries}
    sorted_entries = sorted(
        uniq.values(),
        key=lambda e: ((e.occurred_at or e.created_at or ""), (e.created_at or ""), (e.id or "")),
        reverse=True,
    )
    return sorted_entries[:limit]
|
|
|
|
|
|
async def list_all_emails(
    direction: str | None = None,
    customers_only: bool = False,
    mail_accounts: list[str] | None = None,
    limit: int = 500,
) -> list[CommInDB]:
    """Return email comm-log rows across all customers, newest first.

    Args:
        direction: Optional equality filter on the ``direction`` column.
        customers_only: When True, exclude rows never linked to a customer.
        mail_accounts: Restrict to these mailbox accounts when provided.
        limit: Maximum number of rows returned.
    """
    db = await mqtt_db.get_db()
    conditions = ["type = 'email'"]
    params: list = []
    if direction:
        conditions.append("direction = ?")
        params.append(direction)
    if customers_only:
        conditions.append("customer_id IS NOT NULL")
    if mail_accounts:
        placeholders = ",".join("?" for _ in mail_accounts)
        conditions.append(f"mail_account IN ({placeholders})")
        params.extend(mail_accounts)
    clause = f"WHERE {' AND '.join(conditions)}"
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    return [_row_to_comm(dict(r)) for r in rows]
|
|
|
|
|
|
async def list_all_comms(
    type: str | None = None,
    direction: str | None = None,
    limit: int = 200,
) -> list[CommInDB]:
    """Return comm-log rows of any type, newest first, with optional filters."""
    db = await mqtt_db.get_db()
    conditions: list[str] = []
    params: list = []
    for column, value in (("type", type), ("direction", direction)):
        if value:
            conditions.append(f"{column} = ?")
            params.append(value)
    clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_comms_log {clause} ORDER BY COALESCE(occurred_at, created_at) DESC, created_at DESC LIMIT ?",
        params + [limit],
    )
    return [_row_to_comm(dict(r)) for r in rows]
|
|
|
|
|
|
async def get_comm(comm_id: str) -> CommInDB:
    """Fetch a single comm-log entry by primary key.

    Raises:
        HTTPException 404: no row with the given id exists.
    """
    db = await mqtt_db.get_db()
    matches = await db.execute_fetchall(
        "SELECT * FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if matches:
        return _row_to_comm(dict(matches[0]))
    raise HTTPException(status_code=404, detail="Comm entry not found")
|
|
|
|
|
|
async def create_comm(data: CommCreate) -> CommInDB:
    """Insert a new comm-log row and return the stored entry."""
    db = await mqtt_db.get_db()
    created_at = datetime.utcnow().isoformat()
    comm_id = str(uuid.uuid4())
    # Default the business timestamp to "now" when the caller omits it.
    occurred_at = data.occurred_at or created_at
    values = (
        comm_id,
        data.customer_id,
        data.type.value,
        data.mail_account,
        data.direction.value,
        data.subject,
        data.body,
        json.dumps([a.model_dump() for a in data.attachments]),
        data.ext_message_id,
        data.logged_by,
        occurred_at,
        created_at,
    )
    await db.execute(
        """INSERT INTO crm_comms_log
        (id, customer_id, type, mail_account, direction, subject, body, attachments,
        ext_message_id, logged_by, occurred_at, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        values,
    )
    await db.commit()
    return await get_comm(comm_id)
|
|
|
|
|
|
async def update_comm(comm_id: str, data: CommUpdate) -> CommInDB:
    """Partially update a comm-log entry; fields left as None are untouched.

    Raises:
        HTTPException 404: no row with the given id exists.
    """
    db = await mqtt_db.get_db()
    existing = await db.execute_fetchall(
        "SELECT id FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if not existing:
        raise HTTPException(status_code=404, detail="Comm entry not found")

    changes = data.model_dump(exclude_none=True)
    if not changes:
        # Nothing to write: return the row as-is.
        return await get_comm(comm_id)

    # Column names come from the pydantic model's own fields, not user input.
    assignments = ", ".join(f"{column} = ?" for column in changes)
    await db.execute(
        f"UPDATE crm_comms_log SET {assignments} WHERE id = ?",
        list(changes.values()) + [comm_id],
    )
    await db.commit()
    return await get_comm(comm_id)
|
|
|
|
|
|
async def delete_comm(comm_id: str) -> None:
    """Delete a single comm-log entry.

    Raises:
        HTTPException 404: no row with the given id exists.
    """
    db = await mqtt_db.get_db()
    found = await db.execute_fetchall(
        "SELECT id FROM crm_comms_log WHERE id = ?", (comm_id,)
    )
    if not found:
        raise HTTPException(status_code=404, detail="Comm entry not found")
    await db.execute("DELETE FROM crm_comms_log WHERE id = ?", (comm_id,))
    await db.commit()
|
|
|
|
|
|
async def delete_comms_bulk(ids: list[str]) -> int:
    """Delete multiple comm entries in one statement.

    Returns:
        Number of rows actually removed (missing ids are silently skipped).
    """
    if not ids:
        return 0
    db = await mqtt_db.get_db()
    marks = ",".join("?" for _ in ids)
    cursor = await db.execute(
        f"DELETE FROM crm_comms_log WHERE id IN ({marks})", ids
    )
    await db.commit()
    return cursor.rowcount
|
|
|
|
|
|
async def set_comm_important(comm_id: str, important: bool) -> CommInDB:
    """Set the is_important flag and return the refreshed entry.

    Raises:
        HTTPException 404: raised by get_comm when the id does not exist.
    """
    db = await mqtt_db.get_db()
    await db.execute(
        "UPDATE crm_comms_log SET is_important = ? WHERE id = ?",
        (int(important), comm_id),  # SQLite stores booleans as 0/1
    )
    await db.commit()
    return await get_comm(comm_id)
|
|
|
|
|
|
async def set_comm_read(comm_id: str, read: bool) -> CommInDB:
    """Set the is_read flag and return the refreshed entry.

    Raises:
        HTTPException 404: raised by get_comm when the id does not exist.
    """
    db = await mqtt_db.get_db()
    await db.execute(
        "UPDATE crm_comms_log SET is_read = ? WHERE id = ?",
        (int(read), comm_id),  # SQLite stores booleans as 0/1
    )
    await db.commit()
    return await get_comm(comm_id)
|
|
|
|
|
|
# ── Media (SQLite, async) ─────────────────────────────────────────────────────
|
|
|
|
def _row_to_media(row: dict) -> MediaInDB:
    """Convert a SQLite crm_media row into a MediaInDB, decoding the tags JSON."""
    record = dict(row)
    record["tags"] = json.loads(record.get("tags") or "[]")
    return MediaInDB(**record)
|
|
|
|
|
|
async def list_media(
    customer_id: str | None = None,
    order_id: str | None = None,
) -> list[MediaInDB]:
    """List media rows, optionally scoped to a customer and/or order, newest first."""
    db = await mqtt_db.get_db()
    conditions: list[str] = []
    params: list = []
    for column, value in (("customer_id", customer_id), ("order_id", order_id)):
        if value:
            conditions.append(f"{column} = ?")
            params.append(value)
    clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""
    rows = await db.execute_fetchall(
        f"SELECT * FROM crm_media {clause} ORDER BY created_at DESC",
        params,
    )
    return [_row_to_media(dict(r)) for r in rows]
|
|
|
|
|
|
async def create_media(data: MediaCreate) -> MediaInDB:
    """Insert a new media record and return it as stored."""
    db = await mqtt_db.get_db()
    created_at = datetime.utcnow().isoformat()
    media_id = str(uuid.uuid4())
    # Direction is an optional enum; persist its raw value (or NULL).
    direction = data.direction.value if data.direction else None

    await db.execute(
        """INSERT INTO crm_media
        (id, customer_id, order_id, filename, nextcloud_path, mime_type,
        direction, tags, uploaded_by, created_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (
            media_id,
            data.customer_id,
            data.order_id,
            data.filename,
            data.nextcloud_path,
            data.mime_type,
            direction,
            json.dumps(data.tags),
            data.uploaded_by,
            created_at,
        ),
    )
    await db.commit()

    rows = await db.execute_fetchall(
        "SELECT * FROM crm_media WHERE id = ?", (media_id,)
    )
    return _row_to_media(dict(rows[0]))
|
|
|
|
|
|
async def delete_media(media_id: str) -> None:
    """Delete a single media record.

    Raises:
        HTTPException 404: no row with the given id exists.
    """
    db = await mqtt_db.get_db()
    found = await db.execute_fetchall(
        "SELECT id FROM crm_media WHERE id = ?", (media_id,)
    )
    if not found:
        raise HTTPException(status_code=404, detail="Media entry not found")
    await db.execute("DELETE FROM crm_media WHERE id = ?", (media_id,))
    await db.commit()
|