feat(dashboard): add logs page, pagination, quick mapping modal, price pre-validation
- Add /logs page with per-order sync run details, filters (Toate/Importate/Fara Mapare/Erori) - Add price pre-validation (validate_prices + ensure_prices) to prevent ORA-20000 on direct articles - Add find_new_orders() to detect orders not yet in Oracle COMENZI - Extend missing_skus table with order context (order_count, order_numbers, customers) - Add server-side pagination on /api/validate/missing-skus and /missing-skus page - Replace confusing "Skip"/"Err" with "Fara Mapare"/"Erori" terminology - Add inline mapping modal on dashboard (replaces navigation to /mappings) - Add 2-row stat cards: orders (Comenzi Noi/Ready/Importate/Fara Mapare/Erori) + articles - Add ID_POL/ID_GESTIUNE/ID_SECTIE to config.py and .env - Update .gitignore (venv, *.db, api/api/, logs/) - 33/33 unit tests pass, E2E verified with Playwright Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -59,14 +59,25 @@ async def add_import_order(sync_run_id: str, order_number: str, order_date: str,
|
||||
await db.close()
|
||||
|
||||
|
||||
async def track_missing_sku(sku: str, product_name: str = "",
                            order_count: int = 0, order_numbers: str = None,
                            customers: str = None):
    """Track a missing SKU with order context."""
    conn = await get_sqlite()
    try:
        # First sighting only: INSERT OR IGNORE leaves existing rows untouched.
        await conn.execute("""
            INSERT OR IGNORE INTO missing_skus (sku, product_name)
            VALUES (?, ?)
        """, (sku, product_name))
        # Refresh the context columns whenever any context was supplied,
        # so the row always carries the latest order/customer data.
        context_given = bool(order_count) or bool(order_numbers) or bool(customers)
        if context_given:
            await conn.execute("""
                UPDATE missing_skus SET
                    order_count = ?,
                    order_numbers = ?,
                    customers = ?
                WHERE sku = ?
            """, (order_count, order_numbers, customers, sku))
        await conn.commit()
    finally:
        await conn.close()
|
||||
@@ -85,6 +96,38 @@ async def resolve_missing_sku(sku: str):
|
||||
await db.close()
|
||||
|
||||
|
||||
async def get_missing_skus_paginated(page: int = 1, per_page: int = 20, resolved: int = 0):
    """Get paginated missing SKUs."""
    conn = await get_sqlite()
    try:
        skip = (page - 1) * per_page

        # Total row count for the requested resolved-state, used for paging math.
        count_cur = await conn.execute(
            "SELECT COUNT(*) FROM missing_skus WHERE resolved = ?", (resolved,)
        )
        total = (await count_cur.fetchone())[0]

        # Most-referenced SKUs first, then newest first within ties.
        rows_cur = await conn.execute("""
            SELECT sku, product_name, first_seen, resolved, resolved_at,
                   order_count, order_numbers, customers
            FROM missing_skus
            WHERE resolved = ?
            ORDER BY order_count DESC, first_seen DESC
            LIMIT ? OFFSET ?
        """, (resolved, per_page, skip))
        records = await rows_cur.fetchall()

        # Ceiling division for the page count; zero pages when empty.
        page_total = (total + per_page - 1) // per_page if total > 0 else 0
        return {
            "missing_skus": [dict(r) for r in records],
            "total": total,
            "page": page,
            "per_page": per_page,
            "pages": page_total
        }
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def get_sync_runs(page: int = 1, per_page: int = 20):
|
||||
"""Get paginated sync run history."""
|
||||
db = await get_sqlite()
|
||||
@@ -165,6 +208,17 @@ async def get_dashboard_stats():
|
||||
)
|
||||
missing = (await cursor.fetchone())[0]
|
||||
|
||||
# Article stats from last sync
|
||||
cursor = await db.execute("""
|
||||
SELECT COUNT(DISTINCT sku) FROM missing_skus
|
||||
""")
|
||||
total_missing_skus = (await cursor.fetchone())[0]
|
||||
|
||||
cursor = await db.execute("""
|
||||
SELECT COUNT(DISTINCT sku) FROM missing_skus WHERE resolved = 0
|
||||
""")
|
||||
unresolved_skus = (await cursor.fetchone())[0]
|
||||
|
||||
# Last sync run
|
||||
cursor = await db.execute("""
|
||||
SELECT * FROM sync_runs ORDER BY started_at DESC LIMIT 1
|
||||
@@ -176,6 +230,8 @@ async def get_dashboard_stats():
|
||||
"skipped": skipped,
|
||||
"errors": errors,
|
||||
"missing_skus": missing,
|
||||
"total_tracked_skus": total_missing_skus,
|
||||
"unresolved_skus": unresolved_skus,
|
||||
"last_run": dict(last_run) if last_run else None
|
||||
}
|
||||
finally:
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from . import order_reader, validation_service, import_service, sqlite_service
|
||||
from ..config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -52,12 +54,31 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
|
||||
_current_sync["progress"] = f"Validating {len(orders)} orders..."
|
||||
|
||||
# Step 2: Validate SKUs (blocking Oracle call -> run in thread)
|
||||
# Step 2a: Find new orders (not yet in Oracle)
|
||||
all_order_numbers = [o.number for o in orders]
|
||||
new_orders = await asyncio.to_thread(
|
||||
validation_service.find_new_orders, all_order_numbers
|
||||
)
|
||||
|
||||
# Step 2b: Validate SKUs (blocking Oracle call -> run in thread)
|
||||
all_skus = order_reader.get_all_skus(orders)
|
||||
validation = await asyncio.to_thread(validation_service.validate_skus, all_skus)
|
||||
importable, skipped = validation_service.classify_orders(orders, validation)
|
||||
|
||||
# Track missing SKUs
|
||||
# Step 2c: Build SKU context from skipped orders
|
||||
sku_context = {} # {sku: {"orders": [], "customers": []}}
|
||||
for order, missing_skus_list in skipped:
|
||||
customer = order.billing.company_name or \
|
||||
f"{order.billing.firstname} {order.billing.lastname}"
|
||||
for sku in missing_skus_list:
|
||||
if sku not in sku_context:
|
||||
sku_context[sku] = {"orders": [], "customers": []}
|
||||
if order.number not in sku_context[sku]["orders"]:
|
||||
sku_context[sku]["orders"].append(order.number)
|
||||
if customer not in sku_context[sku]["customers"]:
|
||||
sku_context[sku]["customers"].append(customer)
|
||||
|
||||
# Track missing SKUs with context
|
||||
for sku in validation["missing"]:
|
||||
product_name = ""
|
||||
for order in orders:
|
||||
@@ -67,7 +88,41 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
break
|
||||
if product_name:
|
||||
break
|
||||
await sqlite_service.track_missing_sku(sku, product_name)
|
||||
ctx = sku_context.get(sku, {})
|
||||
await sqlite_service.track_missing_sku(
|
||||
sku, product_name,
|
||||
order_count=len(ctx.get("orders", [])),
|
||||
order_numbers=json.dumps(ctx.get("orders", [])) if ctx.get("orders") else None,
|
||||
customers=json.dumps(ctx.get("customers", [])) if ctx.get("customers") else None,
|
||||
)
|
||||
|
||||
# Step 2d: Pre-validate prices for importable articles
|
||||
id_pol = id_pol or settings.ID_POL
|
||||
if id_pol and importable:
|
||||
_current_sync["progress"] = "Validating prices..."
|
||||
# Gather all CODMATs from importable orders
|
||||
all_codmats = set()
|
||||
for order in importable:
|
||||
for item in order.items:
|
||||
if item.sku in validation["mapped"]:
|
||||
# Mapped SKUs resolve to codmat via ARTICOLE_TERTI (handled by import)
|
||||
pass
|
||||
elif item.sku in validation["direct"]:
|
||||
all_codmats.add(item.sku)
|
||||
# For mapped SKUs, we'd need the ARTICOLE_TERTI lookup - direct SKUs = codmat
|
||||
if all_codmats:
|
||||
price_result = await asyncio.to_thread(
|
||||
validation_service.validate_prices, all_codmats, id_pol
|
||||
)
|
||||
if price_result["missing_price"]:
|
||||
logger.info(
|
||||
f"Auto-adding price 0 for {len(price_result['missing_price'])} "
|
||||
f"direct articles in policy {id_pol}"
|
||||
)
|
||||
await asyncio.to_thread(
|
||||
validation_service.ensure_prices,
|
||||
price_result["missing_price"], id_pol
|
||||
)
|
||||
|
||||
# Step 3: Record skipped orders
|
||||
for order, missing_skus in skipped:
|
||||
@@ -138,6 +193,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
"status": status,
|
||||
"json_files": json_count,
|
||||
"total_orders": len(orders),
|
||||
"new_orders": len(new_orders),
|
||||
"imported": imported_count,
|
||||
"skipped": len(skipped),
|
||||
"errors": error_count,
|
||||
|
||||
@@ -69,3 +69,123 @@ def classify_orders(orders, validation_result):
|
||||
importable.append(order)
|
||||
|
||||
return importable, skipped
|
||||
|
||||
def find_new_orders(order_numbers: list[str]) -> set[str]:
    """Check which order numbers do NOT already exist in Oracle COMENZI.

    Returns: set of order numbers that are truly new (not yet imported).
    """
    if not order_numbers:
        return set()

    already_imported = set()
    numbers = list(order_numbers)
    batch_size = 500  # stay under Oracle's 1000-item IN-list limit

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            for start in range(0, len(numbers), batch_size):
                chunk = numbers[start:start + batch_size]
                # Named bind variables :o0..:oN for this chunk.
                in_clause = ",".join(f":o{j}" for j in range(len(chunk)))
                binds = {f"o{j}": num for j, num in enumerate(chunk)}

                cur.execute(f"""
                    SELECT DISTINCT comanda_externa FROM COMENZI
                    WHERE comanda_externa IN ({in_clause}) AND sters = 0
                """, binds)
                already_imported.update(row[0] for row in cur)

    fresh = set(order_numbers) - already_imported
    logger.info(f"Order check: {len(fresh)} new, {len(already_imported)} already exist out of {len(order_numbers)} total")
    return fresh
|
||||
|
||||
def validate_prices(codmats: set[str], id_pol: int) -> dict:
    """Check which CODMATs have a price entry in CRM_POLITICI_PRET_ART for the given policy.

    Returns: {"has_price": set_of_codmats, "missing_price": set_of_codmats}
    """
    if not codmats:
        return {"has_price": set(), "missing_price": set()}

    id_by_codmat = {}
    priced_ids = set()
    all_codmats = list(codmats)
    batch_size = 500  # stay under Oracle's 1000-item IN-list limit

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            # Step 1: resolve each CODMAT to its ID_ARTICOL.
            for start in range(0, len(all_codmats), batch_size):
                chunk = all_codmats[start:start + batch_size]
                in_clause = ",".join(f":c{j}" for j in range(len(chunk)))
                binds = {f"c{j}": cm for j, cm in enumerate(chunk)}

                cur.execute(f"""
                    SELECT id_articol, codmat FROM NOM_ARTICOLE
                    WHERE codmat IN ({in_clause})
                """, binds)
                for id_articol, codmat in cur:
                    id_by_codmat[codmat] = id_articol

            # Step 2: of those IDs, find which already have a price row in the policy.
            all_ids = list(id_by_codmat.values())
            for start in range(0, len(all_ids), batch_size):
                chunk = all_ids[start:start + batch_size]
                in_clause = ",".join(f":a{j}" for j in range(len(chunk)))
                binds = {f"a{j}": aid for j, aid in enumerate(chunk)}
                binds["id_pol"] = id_pol

                cur.execute(f"""
                    SELECT DISTINCT pa.ID_ARTICOL FROM CRM_POLITICI_PRET_ART pa
                    WHERE pa.ID_POL = :id_pol AND pa.ID_ARTICOL IN ({in_clause})
                """, binds)
                priced_ids.update(row[0] for row in cur)

    # Translate the priced article IDs back to CODMATs; anything not found
    # in NOM_ARTICOLE or without a policy row counts as missing a price.
    has_price = {cm for cm, aid in id_by_codmat.items() if aid in priced_ids}
    missing_price = codmats - has_price

    logger.info(f"Price validation (policy {id_pol}): {len(has_price)} have price, {len(missing_price)} missing price")
    return {"has_price": has_price, "missing_price": missing_price}
|
||||
|
||||
def ensure_prices(codmats: set[str], id_pol: int):
    """Insert price 0 entries for CODMATs missing from the given price policy."""
    if not codmats:
        return

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            # Resolve the policy's currency up front; an unknown policy aborts the run.
            cur.execute("""
                SELECT ID_VALUTA FROM CRM_POLITICI_PRETURI WHERE ID_POL = :id_pol
            """, {"id_pol": id_pol})
            policy_row = cur.fetchone()
            if not policy_row:
                logger.error(f"Price policy {id_pol} not found in CRM_POLITICI_PRETURI")
                return
            id_valuta = policy_row[0]

            for codmat in codmats:
                # Look up the article ID for this CODMAT; skip unknown articles.
                cur.execute("""
                    SELECT id_articol FROM NOM_ARTICOLE WHERE codmat = :codmat
                """, {"codmat": codmat})
                art_row = cur.fetchone()
                if not art_row:
                    logger.warning(f"CODMAT {codmat} not found in NOM_ARTICOLE, skipping price insert")
                    continue

                # Zero-price placeholder row so the Oracle import no longer
                # rejects the article for a missing policy price.
                cur.execute("""
                    INSERT INTO CRM_POLITICI_PRET_ART
                    (ID_POL_ART, ID_POL, ID_ARTICOL, PRET, ID_COMANDA, ID_VALUTA,
                     ID_UTIL, DATAORA, PROC_TVAV, ID_PARTR, ID_PARTZ,
                     PRETFTVA, PRETCTVA, CANTITATE, ID_UM, PRET_MIN, PRET_MIN_TVA)
                    VALUES
                    (SEQ_CRM_POLITICI_PRET_ART.NEXTVAL, :id_pol, :id_articol, 0, NULL, :id_valuta,
                     -3, SYSDATE, 1.19, NULL, NULL,
                     0, 0, 0, NULL, 0, 0)
                """, {"id_pol": id_pol, "id_articol": art_row[0], "id_valuta": id_valuta})
                logger.info(f"Pret 0 adaugat pentru CODMAT {codmat} in politica {id_pol}")

        # Commit once for the whole batch of inserts.
        conn.commit()

    logger.info(f"Ensure prices done: {len(codmats)} CODMATs processed for policy {id_pol}")
|
||||
|
||||
Reference in New Issue
Block a user