Replace import_orders (insert-per-run) with orders table (one row per order, upsert on conflict). Eliminates dedup CTE on every dashboard query and prevents unbounded row growth at 400-500 orders/sync.

Key changes:
- orders table: PK order_number, upsert via ON CONFLICT DO UPDATE; COALESCE preserves id_comanda once set; times_skipped auto-increments
- sync_run_orders: lightweight junction (sync_run_id, order_number) replaces sync_run_id column on orders
- order_items: PK changed to (order_number, sku), INSERT OR IGNORE
- Auto-migration in init_sqlite(): import_orders → orders on first boot, old table renamed to import_orders_bak
- /api/dashboard/orders: period_days param (3/7/30/0=all, default 7)
- Dashboard: period selector buttons in orders card header
- start.sh: stop existing process on port 5003 before restart; remove --reload (broken on WSL2 /mnt/e/)
- Add invoice_service, E2E Playwright tests, Oracle package updates

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
132 lines
4.7 KiB
Python
132 lines
4.7 KiB
Python
import asyncio
|
|
import csv
|
|
import io
|
|
import json
|
|
from fastapi import APIRouter, Query
|
|
from fastapi.responses import StreamingResponse
|
|
|
|
from ..services import order_reader, validation_service, sqlite_service
|
|
from ..database import get_sqlite
|
|
|
|
router = APIRouter(prefix="/api/validate", tags=["validation"])
|
|
|
|
@router.post("/scan")
|
|
async def scan_and_validate():
|
|
"""Scan JSON files and validate all SKUs."""
|
|
orders, json_count = order_reader.read_json_orders()
|
|
|
|
if not orders:
|
|
return {"orders": 0, "json_files": json_count, "skus": {}, "message": "No orders found"}
|
|
|
|
all_skus = order_reader.get_all_skus(orders)
|
|
result = validation_service.validate_skus(all_skus)
|
|
importable, skipped = validation_service.classify_orders(orders, result)
|
|
|
|
# Find new orders (not yet in Oracle)
|
|
all_order_numbers = [o.number for o in orders]
|
|
new_orders = await asyncio.to_thread(validation_service.find_new_orders, all_order_numbers)
|
|
|
|
# Build SKU context from skipped orders and track missing SKUs
|
|
sku_context = {} # sku -> {order_numbers: [], customers: []}
|
|
for order, missing_list in skipped:
|
|
customer = order.billing.company_name or f"{order.billing.firstname} {order.billing.lastname}"
|
|
for sku in missing_list:
|
|
if sku not in sku_context:
|
|
sku_context[sku] = {"order_numbers": [], "customers": []}
|
|
sku_context[sku]["order_numbers"].append(order.number)
|
|
if customer not in sku_context[sku]["customers"]:
|
|
sku_context[sku]["customers"].append(customer)
|
|
|
|
for sku in result["missing"]:
|
|
# Find product name from orders
|
|
product_name = ""
|
|
for order in orders:
|
|
for item in order.items:
|
|
if item.sku == sku:
|
|
product_name = item.name
|
|
break
|
|
if product_name:
|
|
break
|
|
|
|
ctx = sku_context.get(sku, {})
|
|
await sqlite_service.track_missing_sku(
|
|
sku=sku,
|
|
product_name=product_name,
|
|
order_count=len(ctx.get("order_numbers", [])),
|
|
order_numbers=json.dumps(ctx.get("order_numbers", [])),
|
|
customers=json.dumps(ctx.get("customers", []))
|
|
)
|
|
|
|
return {
|
|
"json_files": json_count,
|
|
"total_orders": len(orders),
|
|
"total_skus": len(all_skus),
|
|
"importable": len(importable),
|
|
"skipped": len(skipped),
|
|
"new_orders": len(new_orders),
|
|
"skus": {
|
|
"mapped": len(result["mapped"]),
|
|
"direct": len(result["direct"]),
|
|
"missing": len(result["missing"]),
|
|
"missing_list": sorted(result["missing"]),
|
|
"total_skus": len(all_skus),
|
|
"mapped_skus": len(result["mapped"]),
|
|
"direct_skus": len(result["direct"])
|
|
},
|
|
"skipped_orders": [
|
|
{
|
|
"number": order.number,
|
|
"customer": order.billing.company_name or f"{order.billing.firstname} {order.billing.lastname}",
|
|
"items_count": len(order.items),
|
|
"missing_skus": missing
|
|
}
|
|
for order, missing in skipped[:50] # limit to 50
|
|
]
|
|
}
|
|
|
|
@router.get("/missing-skus")
|
|
async def get_missing_skus(
|
|
page: int = Query(1, ge=1),
|
|
per_page: int = Query(20, ge=1, le=100),
|
|
resolved: int = Query(0, ge=-1, le=1)
|
|
):
|
|
"""Get paginated missing SKUs. resolved=-1 means show all (R10)."""
|
|
result = await sqlite_service.get_missing_skus_paginated(page, per_page, resolved)
|
|
# Backward compat: also include 'unresolved' count
|
|
db = await get_sqlite()
|
|
try:
|
|
cursor = await db.execute(
|
|
"SELECT COUNT(*) FROM missing_skus WHERE resolved = 0"
|
|
)
|
|
unresolved = (await cursor.fetchone())[0]
|
|
finally:
|
|
await db.close()
|
|
result["unresolved"] = unresolved
|
|
return result
|
|
|
|
@router.get("/missing-skus-csv")
|
|
async def export_missing_skus_csv():
|
|
"""Export missing SKUs as CSV compatible with mapping import (R8)."""
|
|
db = await get_sqlite()
|
|
try:
|
|
cursor = await db.execute("""
|
|
SELECT sku, product_name, first_seen, resolved
|
|
FROM missing_skus WHERE resolved = 0
|
|
ORDER BY first_seen DESC
|
|
""")
|
|
rows = await cursor.fetchall()
|
|
finally:
|
|
await db.close()
|
|
|
|
output = io.StringIO()
|
|
writer = csv.writer(output)
|
|
writer.writerow(["sku", "codmat", "cantitate_roa", "procent_pret", "product_name"])
|
|
for row in rows:
|
|
writer.writerow([row["sku"], "", "", "", row["product_name"] or ""])
|
|
|
|
return StreamingResponse(
|
|
io.BytesIO(output.getvalue().encode("utf-8-sig")),
|
|
media_type="text/csv",
|
|
headers={"Content-Disposition": "attachment; filename=missing_skus.csv"}
|
|
)
|