- Add /logs page with per-order sync run details, filters (Toate/Importate/Fara Mapare/Erori) - Add price pre-validation (validate_prices + ensure_prices) to prevent ORA-20000 on direct articles - Add find_new_orders() to detect orders not yet in Oracle COMENZI - Extend missing_skus table with order context (order_count, order_numbers, customers) - Add server-side pagination on /api/validate/missing-skus and /missing-skus page - Replace confusing "Skip"/"Err" with "Fara Mapare"/"Erori" terminology - Add inline mapping modal on dashboard (replaces navigation to /mappings) - Add 2-row stat cards: orders (Comenzi Noi/Ready/Importate/Fara Mapare/Erori) + articles - Add ID_POL/ID_GESTIUNE/ID_SECTIE to config.py and .env - Update .gitignore (venv, *.db, api/api/, logs/) - 33/33 unit tests pass, E2E verified with Playwright Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
132 lines
4.6 KiB
Python
132 lines
4.6 KiB
Python
import asyncio
|
|
import csv
|
|
import io
|
|
import json
|
|
from fastapi import APIRouter, Query
|
|
from fastapi.responses import StreamingResponse
|
|
|
|
from ..services import order_reader, validation_service, sqlite_service
|
|
from ..database import get_sqlite
|
|
|
|
# Router for SKU validation endpoints; all routes below mount under /api/validate.
router = APIRouter(prefix="/api/validate", tags=["validation"])
|
|
|
|
@router.post("/scan")
async def scan_and_validate():
    """Scan JSON order files and validate all SKUs against the mapping tables.

    Reads every order from the JSON drop folder, classifies each SKU as
    mapped / direct / missing, splits orders into importable vs. skipped
    (skipped = at least one missing SKU), detects orders not yet present in
    Oracle, and persists every missing SKU to SQLite together with the order
    context (how many orders reference it, which order numbers, which
    customers) so the /missing-skus page can display it.

    Returns:
        dict summary: file/order/SKU counts, importable/skipped/new-order
        counts, per-category SKU counts, and up to 50 skipped-order details.
    """
    orders, json_count = order_reader.read_json_orders()

    if not orders:
        return {"orders": 0, "json_files": json_count, "skus": {}, "message": "No orders found"}

    all_skus = order_reader.get_all_skus(orders)
    result = validation_service.validate_skus(all_skus)
    importable, skipped = validation_service.classify_orders(orders, result)

    # Find new orders (not yet in Oracle COMENZI); the lookup is a blocking
    # DB call, so run it in a worker thread to keep the event loop free.
    all_order_numbers = [o.number for o in orders]
    new_orders = await asyncio.to_thread(validation_service.find_new_orders, all_order_numbers)

    # Build SKU context from skipped orders: which orders/customers need each
    # missing SKU. Customer falls back to the person's name when there is no
    # company name.
    sku_context = {}  # sku -> {"order_numbers": [...], "customers": [...]}
    for order, missing_list in skipped:
        customer = order.billing.company_name or f"{order.billing.firstname} {order.billing.lastname}"
        for sku in missing_list:
            ctx = sku_context.setdefault(sku, {"order_numbers": [], "customers": []})
            ctx["order_numbers"].append(order.number)
            if customer not in ctx["customers"]:
                ctx["customers"].append(customer)

    # Single pass over all order items to resolve SKU -> product name.
    # setdefault keeps the FIRST occurrence, matching the previous
    # first-match scan but without the O(missing * orders * items) rescans.
    sku_names = {}
    for order in orders:
        for item in order.items:
            sku_names.setdefault(item.sku, item.name)

    for sku in result["missing"]:
        ctx = sku_context.get(sku, {})
        await sqlite_service.track_missing_sku(
            sku=sku,
            product_name=sku_names.get(sku, ""),
            order_count=len(ctx.get("order_numbers", [])),
            order_numbers=json.dumps(ctx.get("order_numbers", [])),
            customers=json.dumps(ctx.get("customers", []))
        )

    return {
        "json_files": json_count,
        "total_orders": len(orders),
        "total_skus": len(all_skus),
        "importable": len(importable),
        "skipped": len(skipped),
        "new_orders": len(new_orders),
        "skus": {
            "mapped": len(result["mapped"]),
            "direct": len(result["direct"]),
            "missing": len(result["missing"]),
            "missing_list": sorted(result["missing"]),
            "total_skus": len(all_skus),
            "mapped_skus": len(result["mapped"]),
            "direct_skus": len(result["direct"])
        },
        "skipped_orders": [
            {
                "number": order.number,
                "customer": order.billing.company_name or f"{order.billing.firstname} {order.billing.lastname}",
                "items_count": len(order.items),
                "missing_skus": missing
            }
            for order, missing in skipped[:50]  # limit to 50
        ]
    }
|
|
|
|
@router.get("/missing-skus")
async def get_missing_skus(
    page: int = Query(1, ge=1),
    per_page: int = Query(20, ge=1, le=100),
    resolved: int = Query(0, ge=0, le=1)
):
    """Return one page of tracked missing SKUs.

    Delegates pagination to sqlite_service and then attaches the total
    count of unresolved SKUs under the 'unresolved' key, which older
    clients of this endpoint still read.
    """
    payload = await sqlite_service.get_missing_skus_paginated(page, per_page, resolved)

    # Backward compat: also include 'unresolved' count
    conn = await get_sqlite()
    try:
        cur = await conn.execute(
            "SELECT COUNT(*) FROM missing_skus WHERE resolved = 0"
        )
        count_row = await cur.fetchone()
    finally:
        await conn.close()

    payload["unresolved"] = count_row[0]
    return payload
|
|
|
|
@router.get("/missing-skus-csv")
async def export_missing_skus_csv():
    """Export unresolved missing SKUs as a CSV download.

    Streams a UTF-8-with-BOM CSV (BOM so Excel opens it with the right
    encoding) containing sku, product_name and first_seen for every
    missing SKU not yet marked resolved, newest first.
    """
    db = await get_sqlite()
    try:
        # Only fetch the columns the CSV actually emits; the previous query
        # also selected 'resolved', which is constant 0 here and unused.
        cursor = await db.execute("""
            SELECT sku, product_name, first_seen
            FROM missing_skus WHERE resolved = 0
            ORDER BY first_seen DESC
        """)
        rows = await cursor.fetchall()
    finally:
        await db.close()

    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow(["sku", "product_name", "first_seen"])
    writer.writerows(
        (row["sku"], row["product_name"], row["first_seen"]) for row in rows
    )

    return StreamingResponse(
        io.BytesIO(output.getvalue().encode("utf-8-sig")),
        media_type="text/csv",
        headers={"Content-Disposition": "attachment; filename=missing_skus.csv"}
    )
|