Replace broken asyncio.to_thread call with len(importable), which already represents the orders ready to process. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
157 lines
5.7 KiB
Python
157 lines
5.7 KiB
Python
import csv
|
|
import io
|
|
import json
|
|
from fastapi import APIRouter, Query
|
|
from fastapi.responses import StreamingResponse
|
|
|
|
from ..services import order_reader, validation_service, sqlite_service
|
|
from ..database import get_sqlite
|
|
|
|
router = APIRouter(prefix="/api/validate", tags=["validation"])
|
|
|
|
@router.post("/scan")
async def scan_and_validate():
    """Scan JSON order files, validate all SKUs, and track missing ones.

    Reads every order via ``order_reader``, classifies SKUs through
    ``validation_service``, records newly missing SKUs in SQLite, and
    returns a summary dict (counts plus up to 50 skipped-order details)
    consumed by the rescan progress banner in missing_skus.html.
    """
    orders, json_count = order_reader.read_json_orders()

    if not orders:
        return {
            "orders": 0, "json_files": json_count, "skus": {}, "message": "No orders found",
            "total_skus_scanned": 0, "new_missing": 0, "auto_resolved": 0, "unchanged": 0,
        }

    all_skus = order_reader.get_all_skus(orders)
    result = validation_service.validate_skus(all_skus)
    importable, skipped = validation_service.classify_orders(orders, result)

    # Build SKU context from skipped orders: which order numbers and
    # customers reference each missing SKU.
    sku_context = {}  # sku -> {"order_numbers": [...], "customers": [...]}
    for order, missing_list in skipped:
        customer = order.billing.company_name or f"{order.billing.firstname} {order.billing.lastname}"
        for sku in missing_list:
            ctx = sku_context.setdefault(sku, {"order_numbers": [], "customers": []})
            ctx["order_numbers"].append(order.number)
            if customer not in ctx["customers"]:
                ctx["customers"].append(customer)

    # Precompute sku -> product name in one pass instead of rescanning every
    # order per missing SKU (was O(orders * missing)). Matches the original
    # resolution order: only the first item per SKU within an order counts,
    # and the first non-empty name wins across orders.
    name_by_sku = {}
    for order in orders:
        seen_in_order = set()
        for item in order.items:
            if item.sku in seen_in_order:
                continue
            seen_in_order.add(item.sku)
            if not name_by_sku.get(item.sku):
                name_by_sku[item.sku] = item.name

    newly_tracked = 0
    for sku in result["missing"]:
        ctx = sku_context.get(sku, {})
        tracked = await sqlite_service.track_missing_sku(
            sku=sku,
            product_name=name_by_sku.get(sku, ""),
            order_count=len(ctx.get("order_numbers", [])),
            order_numbers=json.dumps(ctx.get("order_numbers", [])),
            customers=json.dumps(ctx.get("customers", []))
        )
        if tracked:
            newly_tracked += 1

    total_skus_scanned = len(all_skus)
    # NOTE(review): the response reports len(result["missing"]) as
    # "new_missing" (preserves existing UI behavior). `newly_tracked` counts
    # only SKUs first seen this scan and is currently informational — confirm
    # whether the banner should show it instead.
    new_missing_count = len(result["missing"])
    unchanged = total_skus_scanned - new_missing_count

    return {
        "json_files": json_count,
        "total_orders": len(orders),
        "total_skus": len(all_skus),
        "importable": len(importable),
        "skipped": len(skipped),
        "new_orders": len(importable),
        # Fields consumed by the rescan progress banner in missing_skus.html
        "total_skus_scanned": total_skus_scanned,
        "new_missing": new_missing_count,
        "auto_resolved": 0,
        "unchanged": unchanged,
        "skus": {
            "mapped": len(result["mapped"]),
            "direct": len(result["direct"]),
            "missing": len(result["missing"]),
            "missing_list": sorted(result["missing"]),
            "total_skus": len(all_skus),
            "mapped_skus": len(result["mapped"]),
            "direct_skus": len(result["direct"])
        },
        "skipped_orders": [
            {
                "number": order.number,
                "customer": order.billing.company_name or f"{order.billing.firstname} {order.billing.lastname}",
                "items_count": len(order.items),
                "missing_skus": missing
            }
            for order, missing in skipped[:50]  # limit to 50
        ]
    }
|
|
|
|
@router.get("/missing-skus")
async def get_missing_skus(
    page: int = Query(1, ge=1),
    per_page: int = Query(20, ge=1, le=100),
    resolved: int = Query(0, ge=-1, le=1),
    search: str = Query(None)
):
    """Get paginated missing SKUs. resolved=-1 means show all (R10).

    Optional search filters by sku or product_name. The resolved/unresolved/
    total counts are always computed across ALL records (unfiltered by
    search) so the UI tabs stay accurate.
    """
    db = await get_sqlite()
    try:
        # One aggregate round trip instead of three COUNT queries.
        # SUM(resolved = N) counts matching rows (NULL resolved rows are
        # skipped by SUM, same as the original per-value COUNTs);
        # COALESCE handles the empty-table case where SUM yields NULL.
        cursor = await db.execute(
            "SELECT COUNT(*), "
            "COALESCE(SUM(resolved = 0), 0), "
            "COALESCE(SUM(resolved = 1), 0) "
            "FROM missing_skus"
        )
        total_count, unresolved_count, resolved_count = await cursor.fetchone()
    finally:
        await db.close()

    counts = {
        "total": total_count,
        "unresolved": unresolved_count,
        "resolved": resolved_count,
    }

    result = await sqlite_service.get_missing_skus_paginated(page, per_page, resolved, search=search)
    # Backward compat
    result["unresolved"] = unresolved_count
    result["counts"] = counts
    # rename key for JS consistency
    result["skus"] = result.get("missing_skus", [])
    return result
|
|
|
|
@router.get("/missing-skus-csv")
async def export_missing_skus_csv():
    """Export missing SKUs as CSV compatible with mapping import (R8)."""
    db = await get_sqlite()
    try:
        cursor = await db.execute("""
            SELECT sku, product_name, first_seen, resolved
            FROM missing_skus WHERE resolved = 0
            ORDER BY first_seen DESC
        """)
        rows = await cursor.fetchall()
    finally:
        await db.close()

    # Assemble the CSV in memory: header first, then one row per
    # unresolved SKU with the mapping columns left blank for the user.
    buffer = io.StringIO()
    csv_out = csv.writer(buffer)
    csv_out.writerow(["sku", "codmat", "cantitate_roa", "procent_pret", "product_name"])
    for record in rows:
        csv_out.writerow([record["sku"], "", "", "", record["product_name"] or ""])

    # utf-8-sig adds a BOM so spreadsheet apps detect the encoding.
    payload = buffer.getvalue().encode("utf-8-sig")
    return StreamingResponse(
        io.BytesIO(payload),
        media_type="text/csv",
        headers={"Content-Disposition": "attachment; filename=missing_skus.csv"}
    )
|