From 5f8b9b60030a6426b8a9f31a54423c726d11e2a8 Mon Sep 17 00:00:00 2001 From: Marius Mutu Date: Fri, 13 Mar 2026 17:55:36 +0200 Subject: [PATCH] feat(dashboard): redesign UI with smart polling, unified sync card, filter bar Replace SSE with smart polling (30s idle / 3s when running). Unify sync panel into single two-row card with live progress text. Add unified filter bar (period dropdown, status pills, search) with period-total counts. Add Client/Cont tooltip for different shipping/billing persons. Add SKU mappings pct_total badges + complete/incomplete filter + 409 duplicate check. Add missing SKUs search + rescan progress UX. Migrate SQLite orders schema (shipping_name, billing_name, payment_method, delivery_method). Fix JSON_OUTPUT_DIR path for server running from project root. Fix pagination controls showing top+bottom with per-page selector (25/50/100/250). Co-Authored-By: Claude Sonnet 4.6 --- api/app/database.py | 32 +- api/app/routers/mappings.py | 17 +- api/app/routers/sync.py | 134 ++++--- api/app/routers/validation.py | 52 ++- api/app/services/import_service.py | 71 +++- api/app/services/mapping_service.py | 121 +++++-- api/app/services/sqlite_service.py | 95 +++-- api/app/services/sync_service.py | 159 +++++---- api/app/static/css/style.css | 188 ++++++++++ api/app/static/js/dashboard.js | 529 ++++++++++++++-------------- api/app/static/js/mappings.js | 88 ++++- api/app/templates/dashboard.html | 162 ++++----- api/app/templates/mappings.html | 12 + api/app/templates/missing_skus.html | 223 +++++++----- 14 files changed, 1235 insertions(+), 648 deletions(-) diff --git a/api/app/database.py b/api/app/database.py index 20655e3..e7f9aeb 100644 --- a/api/app/database.py +++ b/api/app/database.py @@ -91,7 +91,11 @@ CREATE TABLE IF NOT EXISTS orders ( times_skipped INTEGER DEFAULT 0, first_seen_at TEXT DEFAULT (datetime('now')), last_sync_run_id TEXT REFERENCES sync_runs(run_id), - updated_at TEXT DEFAULT (datetime('now')) + updated_at TEXT DEFAULT 
(datetime('now')), + shipping_name TEXT, + billing_name TEXT, + payment_method TEXT, + delivery_method TEXT ); CREATE INDEX IF NOT EXISTS idx_orders_status ON orders(status); CREATE INDEX IF NOT EXISTS idx_orders_date ON orders(order_date); @@ -195,18 +199,15 @@ def init_sqlite(): ); """) # Copy latest record per order_number into orders + # Note: old import_orders didn't have address columns — those stay NULL conn.execute(""" INSERT INTO orders (order_number, order_date, customer_name, status, - id_comanda, id_partener, id_adresa_facturare, id_adresa_livrare, - error_message, missing_skus, items_count, last_sync_run_id) + id_comanda, id_partener, error_message, missing_skus, + items_count, last_sync_run_id) SELECT io.order_number, io.order_date, io.customer_name, io.status, - io.id_comanda, io.id_partener, - CASE WHEN io.order_number IN (SELECT order_number FROM import_orders WHERE id_adresa_facturare IS NOT NULL) THEN - (SELECT id_adresa_facturare FROM import_orders WHERE order_number = io.order_number AND id_adresa_facturare IS NOT NULL LIMIT 1) ELSE NULL END, - CASE WHEN io.order_number IN (SELECT order_number FROM import_orders WHERE id_adresa_livrare IS NOT NULL) THEN - (SELECT id_adresa_livrare FROM import_orders WHERE order_number = io.order_number AND id_adresa_livrare IS NOT NULL LIMIT 1) ELSE NULL END, - io.error_message, io.missing_skus, io.items_count, io.sync_run_id + io.id_comanda, io.id_partener, io.error_message, io.missing_skus, + io.items_count, io.sync_run_id FROM import_orders io INNER JOIN ( SELECT order_number, MAX(id) as max_id @@ -272,6 +273,19 @@ def init_sqlite(): conn.execute("ALTER TABLE sync_runs ADD COLUMN error_message TEXT") logger.info("Migrated sync_runs: added column error_message") + # Migrate orders: add shipping/billing/payment/delivery columns + cursor = conn.execute("PRAGMA table_info(orders)") + order_cols = {row[1] for row in cursor.fetchall()} + for col, typedef in [ + ("shipping_name", "TEXT"), + ("billing_name", 
"TEXT"), + ("payment_method", "TEXT"), + ("delivery_method", "TEXT"), + ]: + if col not in order_cols: + conn.execute(f"ALTER TABLE orders ADD COLUMN {col} {typedef}") + logger.info(f"Migrated orders: added column {col}") + conn.commit() except Exception as e: logger.warning(f"Migration check failed: {e}") diff --git a/api/app/routers/mappings.py b/api/app/routers/mappings.py index 0f5981a..4d1ea23 100644 --- a/api/app/routers/mappings.py +++ b/api/app/routers/mappings.py @@ -1,6 +1,7 @@ from fastapi import APIRouter, Query, Request, UploadFile, File -from fastapi.responses import StreamingResponse, HTMLResponse +from fastapi.responses import StreamingResponse, HTMLResponse, JSONResponse from fastapi.templating import Jinja2Templates +from fastapi import HTTPException from pydantic import BaseModel from pathlib import Path from typing import Optional @@ -49,15 +50,19 @@ async def mappings_page(request: Request): @router.get("/api/mappings") async def list_mappings(search: str = "", page: int = 1, per_page: int = 50, sort_by: str = "sku", sort_dir: str = "asc", - show_deleted: bool = False): + show_deleted: bool = False, pct_filter: str = None): result = mapping_service.get_mappings(search=search, page=page, per_page=per_page, sort_by=sort_by, sort_dir=sort_dir, - show_deleted=show_deleted) + show_deleted=show_deleted, + pct_filter=pct_filter) # Merge product names from web_products (R4) skus = list({m["sku"] for m in result.get("mappings", [])}) product_names = await sqlite_service.get_web_products_batch(skus) for m in result.get("mappings", []): m["product_name"] = product_names.get(m["sku"], "") + # Ensure counts key is always present + if "counts" not in result: + result["counts"] = {"total": 0, "complete": 0, "incomplete": 0} return result @router.post("/api/mappings") @@ -67,6 +72,12 @@ async def create_mapping(data: MappingCreate): # Mark SKU as resolved in missing_skus tracking await sqlite_service.resolve_missing_sku(data.sku) return {"success": True, 
**result} + except HTTPException as e: + can_restore = e.headers.get("X-Can-Restore") == "true" if e.headers else False + resp: dict = {"error": e.detail} + if can_restore: + resp["can_restore"] = True + return JSONResponse(status_code=e.status_code, content=resp) except Exception as e: return {"success": False, "error": str(e)} diff --git a/api/app/routers/sync.py b/api/app/routers/sync.py index 600da85..8fdddca 100644 --- a/api/app/routers/sync.py +++ b/api/app/routers/sync.py @@ -5,7 +5,6 @@ from datetime import datetime from fastapi import APIRouter, Request, BackgroundTasks from fastapi.templating import Jinja2Templates from fastapi.responses import HTMLResponse -from starlette.responses import StreamingResponse from pydantic import BaseModel from pathlib import Path from typing import Optional @@ -21,35 +20,6 @@ class ScheduleConfig(BaseModel): interval_minutes: int = 5 -# SSE streaming endpoint -@router.get("/api/sync/stream") -async def sync_stream(request: Request): - """SSE stream for real-time sync progress.""" - q = sync_service.subscribe() - - async def event_generator(): - try: - while True: - # Check if client disconnected - if await request.is_disconnected(): - break - try: - event = await asyncio.wait_for(q.get(), timeout=15.0) - yield f"data: {json.dumps(event)}\n\n" - if event.get("type") in ("completed", "failed"): - break - except asyncio.TimeoutError: - yield f"data: {json.dumps({'type': 'keepalive'})}\n\n" - finally: - sync_service.unsubscribe(q) - - return StreamingResponse( - event_generator(), - media_type="text/event-stream", - headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"} - ) - - # API endpoints @router.post("/api/sync/start") async def start_sync(background_tasks: BackgroundTasks): @@ -72,10 +42,68 @@ async def stop_sync(): @router.get("/api/sync/status") async def sync_status(): - """Get current sync status.""" + """Get current sync status with progress details and last_run info.""" status = await 
sync_service.get_sync_status() - stats = await sqlite_service.get_dashboard_stats() - return {**status, "stats": stats} + + # Build last_run from most recent completed/failed sync_runs row + current_run_id = status.get("run_id") + last_run = None + try: + from ..database import get_sqlite + db = await get_sqlite() + try: + if current_run_id: + cursor = await db.execute(""" + SELECT * FROM sync_runs + WHERE status IN ('completed', 'failed') AND run_id != ? + ORDER BY started_at DESC LIMIT 1 + """, (current_run_id,)) + else: + cursor = await db.execute(""" + SELECT * FROM sync_runs + WHERE status IN ('completed', 'failed') + ORDER BY started_at DESC LIMIT 1 + """) + row = await cursor.fetchone() + if row: + row_dict = dict(row) + duration_seconds = None + if row_dict.get("started_at") and row_dict.get("finished_at"): + try: + dt_start = datetime.fromisoformat(row_dict["started_at"]) + dt_end = datetime.fromisoformat(row_dict["finished_at"]) + duration_seconds = int((dt_end - dt_start).total_seconds()) + except (ValueError, TypeError): + pass + last_run = { + "run_id": row_dict.get("run_id"), + "started_at": row_dict.get("started_at"), + "finished_at": row_dict.get("finished_at"), + "duration_seconds": duration_seconds, + "status": row_dict.get("status"), + "imported": row_dict.get("imported", 0), + "skipped": row_dict.get("skipped", 0), + "errors": row_dict.get("errors", 0), + } + finally: + await db.close() + except Exception: + pass + + # Ensure all expected keys are present + result = { + "status": status.get("status", "idle"), + "run_id": status.get("run_id"), + "started_at": status.get("started_at"), + "finished_at": status.get("finished_at"), + "phase": status.get("phase"), + "phase_text": status.get("phase_text"), + "progress_current": status.get("progress_current", 0), + "progress_total": status.get("progress_total", 0), + "counts": status.get("counts", {"imported": 0, "skipped": 0, "errors": 0}), + "last_run": last_run, + } + return result 
@router.get("/api/sync/history") @@ -277,8 +305,13 @@ async def order_detail(order_number: str): async def dashboard_orders(page: int = 1, per_page: int = 50, search: str = "", status: str = "all", sort_by: str = "order_date", sort_dir: str = "desc", - period_days: int = 7): - """Get orders for dashboard, enriched with invoice data. period_days=0 means all time.""" + period_days: int = 7, + period_start: str = "", period_end: str = ""): + """Get orders for dashboard, enriched with invoice data. + + period_days=0 with period_start/period_end uses custom date range. + period_days=0 without dates means all time. + """ is_uninvoiced_filter = (status == "UNINVOICED") # For UNINVOICED: fetch all IMPORTED orders, then filter post-invoice-check @@ -289,7 +322,9 @@ async def dashboard_orders(page: int = 1, per_page: int = 50, result = await sqlite_service.get_orders( page=fetch_page, per_page=fetch_per_page, search=search, status_filter=fetch_status, sort_by=sort_by, sort_dir=sort_dir, - period_days=period_days + period_days=period_days, + period_start=period_start if period_days == 0 else "", + period_end=period_end if period_days == 0 else "", ) # Enrich imported orders with invoice data from Oracle @@ -309,12 +344,22 @@ async def dashboard_orders(page: int = 1, per_page: int = 50, else: o["invoice"] = None - # Count uninvoiced (IMPORTED without invoice) - uninvoiced_count = sum( + # Add shipping/billing name fields + is_different_person flag + s_name = o.get("shipping_name") or "" + b_name = o.get("billing_name") or "" + o["shipping_name"] = s_name + o["billing_name"] = b_name + o["is_different_person"] = bool(s_name and b_name and s_name != b_name) + + # Build period-total counts (across all pages, same filters) + nefacturate_count = sum( 1 for o in all_orders if o.get("status") == "IMPORTED" and not o.get("invoice") ) - result["counts"]["uninvoiced"] = uninvoiced_count + # Use counts from sqlite_service (already period-scoped) and add nefacturate + counts = 
result.get("counts", {}) + counts["nefacturate"] = nefacturate_count + counts.setdefault("total", counts.get("imported", 0) + counts.get("skipped", 0) + counts.get("error", 0)) # For UNINVOICED filter: apply server-side filtering + pagination if is_uninvoiced_filter: @@ -327,7 +372,16 @@ async def dashboard_orders(page: int = 1, per_page: int = 50, result["per_page"] = per_page result["pages"] = (total + per_page - 1) // per_page if total > 0 else 0 - return result + # Reshape response + return { + "orders": result["orders"], + "pagination": { + "page": result.get("page", page), + "per_page": result.get("per_page", per_page), + "total_pages": result.get("pages", 0), + }, + "counts": counts, + } @router.put("/api/sync/schedule") diff --git a/api/app/routers/validation.py b/api/app/routers/validation.py index f1f7c94..1964b98 100644 --- a/api/app/routers/validation.py +++ b/api/app/routers/validation.py @@ -16,7 +16,10 @@ async def scan_and_validate(): orders, json_count = order_reader.read_json_orders() if not orders: - return {"orders": 0, "json_files": json_count, "skus": {}, "message": "No orders found"} + return { + "orders": 0, "json_files": json_count, "skus": {}, "message": "No orders found", + "total_skus_scanned": 0, "new_missing": 0, "auto_resolved": 0, "unchanged": 0, + } all_skus = order_reader.get_all_skus(orders) result = validation_service.validate_skus(all_skus) @@ -37,6 +40,7 @@ async def scan_and_validate(): if customer not in sku_context[sku]["customers"]: sku_context[sku]["customers"].append(customer) + new_missing = 0 for sku in result["missing"]: # Find product name from orders product_name = "" @@ -49,13 +53,19 @@ async def scan_and_validate(): break ctx = sku_context.get(sku, {}) - await sqlite_service.track_missing_sku( + tracked = await sqlite_service.track_missing_sku( sku=sku, product_name=product_name, order_count=len(ctx.get("order_numbers", [])), order_numbers=json.dumps(ctx.get("order_numbers", [])), 
customers=json.dumps(ctx.get("customers", [])) ) + if tracked: + new_missing += 1 + + total_skus_scanned = len(all_skus) + new_missing_count = len(result["missing"]) + unchanged = total_skus_scanned - new_missing_count return { "json_files": json_count, @@ -64,6 +74,11 @@ async def scan_and_validate(): "importable": len(importable), "skipped": len(skipped), "new_orders": len(new_orders), + # Fields consumed by the rescan progress banner in missing_skus.html + "total_skus_scanned": total_skus_scanned, + "new_missing": new_missing_count, + "auto_resolved": 0, + "unchanged": unchanged, "skus": { "mapped": len(result["mapped"]), "direct": len(result["direct"]), @@ -88,20 +103,35 @@ async def scan_and_validate(): async def get_missing_skus( page: int = Query(1, ge=1), per_page: int = Query(20, ge=1, le=100), - resolved: int = Query(0, ge=-1, le=1) + resolved: int = Query(0, ge=-1, le=1), + search: str = Query(None) ): - """Get paginated missing SKUs. resolved=-1 means show all (R10).""" - result = await sqlite_service.get_missing_skus_paginated(page, per_page, resolved) - # Backward compat: also include 'unresolved' count + """Get paginated missing SKUs. resolved=-1 means show all (R10). 
+ Optional search filters by sku or product_name.""" db = await get_sqlite() try: - cursor = await db.execute( - "SELECT COUNT(*) FROM missing_skus WHERE resolved = 0" - ) - unresolved = (await cursor.fetchone())[0] + # Compute counts across ALL records (unfiltered by search) + cursor = await db.execute("SELECT COUNT(*) FROM missing_skus WHERE resolved = 0") + unresolved_count = (await cursor.fetchone())[0] + cursor = await db.execute("SELECT COUNT(*) FROM missing_skus WHERE resolved = 1") + resolved_count = (await cursor.fetchone())[0] + cursor = await db.execute("SELECT COUNT(*) FROM missing_skus") + total_count = (await cursor.fetchone())[0] finally: await db.close() - result["unresolved"] = unresolved + + counts = { + "total": total_count, + "unresolved": unresolved_count, + "resolved": resolved_count, + } + + result = await sqlite_service.get_missing_skus_paginated(page, per_page, resolved, search=search) + # Backward compat + result["unresolved"] = unresolved_count + result["counts"] = counts + # rename key for JS consistency + result["skus"] = result.get("missing_skus", []) return result @router.get("/missing-skus-csv") diff --git a/api/app/services/import_service.py b/api/app/services/import_service.py index c77e5ab..f99c64d 100644 --- a/api/app/services/import_service.py +++ b/api/app/services/import_service.py @@ -106,7 +106,7 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None) -> dic raise RuntimeError("Oracle pool not initialized") with database.pool.acquire() as conn: with conn.cursor() as cur: - # Step 1: Process partner + # Step 1: Process partner — use shipping person data for name id_partener = cur.var(oracledb.DB_TYPE_NUMBER) if order.billing.is_company: @@ -115,9 +115,15 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None) -> dic registru = clean_web_text(order.billing.company_reg) or None is_pj = 1 else: - denumire = clean_web_text( - f"{order.billing.lastname} {order.billing.firstname}" - 
).upper() + # Use shipping person for partner name (person on shipping label) + if order.shipping and (order.shipping.lastname or order.shipping.firstname): + denumire = clean_web_text( + f"{order.shipping.lastname} {order.shipping.firstname}" + ).upper() + else: + denumire = clean_web_text( + f"{order.billing.lastname} {order.billing.firstname}" + ).upper() cod_fiscal = None registru = None is_pj = 0 @@ -133,20 +139,31 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None) -> dic result["id_partener"] = int(partner_id) - # Step 2: Process billing address - id_adresa_fact = cur.var(oracledb.DB_TYPE_NUMBER) - billing_addr = format_address_for_oracle( - order.billing.address, order.billing.city, order.billing.region + # Determine if billing and shipping are different persons + billing_name = clean_web_text( + f"{order.billing.lastname} {order.billing.firstname}" + ).strip().upper() + shipping_name = "" + if order.shipping: + shipping_name = clean_web_text( + f"{order.shipping.lastname} {order.shipping.firstname}" + ).strip().upper() + different_person = bool( + shipping_name and billing_name and shipping_name != billing_name ) - cur.callproc("PACK_IMPORT_PARTENERI.cauta_sau_creeaza_adresa", [ - partner_id, billing_addr, - order.billing.phone or "", - order.billing.email or "", - id_adresa_fact - ]) - addr_fact_id = id_adresa_fact.getvalue() - # Step 3: Process shipping address (if different) + # Step 2: Process shipping address (primary — person on shipping label) + # Use shipping person phone/email for partner contact + shipping_phone = "" + shipping_email = "" + if order.shipping: + shipping_phone = order.shipping.phone or "" + shipping_email = order.shipping.email or "" + if not shipping_phone: + shipping_phone = order.billing.phone or "" + if not shipping_email: + shipping_email = order.billing.email or "" + addr_livr_id = None if order.shipping: id_adresa_livr = cur.var(oracledb.DB_TYPE_NUMBER) @@ -156,12 +173,30 @@ def 
import_single_order(order, id_pol: int = None, id_sectie: int = None) -> dic ) cur.callproc("PACK_IMPORT_PARTENERI.cauta_sau_creeaza_adresa", [ partner_id, shipping_addr, - order.shipping.phone or "", - order.shipping.email or "", + shipping_phone, + shipping_email, id_adresa_livr ]) addr_livr_id = id_adresa_livr.getvalue() + # Step 3: Process billing address + if different_person: + # Different person: use shipping address for BOTH billing and shipping in ROA + addr_fact_id = addr_livr_id + else: + # Same person: use billing address as-is + id_adresa_fact = cur.var(oracledb.DB_TYPE_NUMBER) + billing_addr = format_address_for_oracle( + order.billing.address, order.billing.city, order.billing.region + ) + cur.callproc("PACK_IMPORT_PARTENERI.cauta_sau_creeaza_adresa", [ + partner_id, billing_addr, + order.billing.phone or "", + order.billing.email or "", + id_adresa_fact + ]) + addr_fact_id = id_adresa_fact.getvalue() + if addr_fact_id is not None: result["id_adresa_facturare"] = int(addr_fact_id) if addr_livr_id is not None: diff --git a/api/app/services/mapping_service.py b/api/app/services/mapping_service.py index 382e01a..ee34ee4 100644 --- a/api/app/services/mapping_service.py +++ b/api/app/services/mapping_service.py @@ -9,8 +9,14 @@ logger = logging.getLogger(__name__) def get_mappings(search: str = "", page: int = 1, per_page: int = 50, sort_by: str = "sku", sort_dir: str = "asc", - show_deleted: bool = False): - """Get paginated mappings with optional search and sorting.""" + show_deleted: bool = False, pct_filter: str = None): + """Get paginated mappings with optional search, sorting, and pct_filter. 
+ + pct_filter values: + 'complete' – only SKU groups where sum(procent_pret for active rows) == 100 + 'incomplete' – only SKU groups where sum < 100 + None / 'all' – no filter + """ if database.pool is None: raise HTTPException(status_code=503, detail="Oracle unavailable") @@ -49,16 +55,7 @@ def get_mappings(search: str = "", page: int = 1, per_page: int = 50, params["search"] = search where = "WHERE " + " AND ".join(where_clauses) if where_clauses else "" - # Count total - count_sql = f""" - SELECT COUNT(*) FROM ARTICOLE_TERTI at - LEFT JOIN nom_articole na ON na.codmat = at.codmat - {where} - """ - cur.execute(count_sql, params) - total = cur.fetchone()[0] - - # Get page + # Fetch ALL matching rows (no pagination yet — we need to group by SKU first) data_sql = f""" SELECT at.sku, at.codmat, na.denumire, na.um, at.cantitate_roa, at.procent_pret, at.activ, at.sters, @@ -67,30 +64,114 @@ def get_mappings(search: str = "", page: int = 1, per_page: int = 50, LEFT JOIN nom_articole na ON na.codmat = at.codmat {where} ORDER BY {order_clause} - OFFSET :offset ROWS FETCH NEXT :per_page ROWS ONLY """ - params["offset"] = offset - params["per_page"] = per_page cur.execute(data_sql, params) - columns = [col[0].lower() for col in cur.description] - rows = [dict(zip(columns, row)) for row in cur.fetchall()] + all_rows = [dict(zip(columns, row)) for row in cur.fetchall()] + + # Group by SKU and compute pct_total for each group + from collections import OrderedDict + groups = OrderedDict() + for row in all_rows: + sku = row["sku"] + if sku not in groups: + groups[sku] = [] + groups[sku].append(row) + + # Compute counts across ALL groups (before pct_filter) + total_skus = len(groups) + complete_skus = 0 + incomplete_skus = 0 + for sku, rows in groups.items(): + pct_total = sum( + (r["procent_pret"] or 0) + for r in rows + if r.get("activ") == 1 + ) + if pct_total >= 99.99: + complete_skus += 1 + else: + incomplete_skus += 1 + + counts = { + "total": total_skus, + "complete": 
complete_skus, + "incomplete": incomplete_skus, + } + + # Apply pct_filter + if pct_filter in ("complete", "incomplete"): + filtered_groups = {} + for sku, rows in groups.items(): + pct_total = sum( + (r["procent_pret"] or 0) + for r in rows + if r.get("activ") == 1 + ) + is_complete = pct_total >= 99.99 + if pct_filter == "complete" and is_complete: + filtered_groups[sku] = rows + elif pct_filter == "incomplete" and not is_complete: + filtered_groups[sku] = rows + groups = filtered_groups + + # Flatten back to rows for pagination (paginate by raw row count) + filtered_rows = [row for rows in groups.values() for row in rows] + total = len(filtered_rows) + page_rows = filtered_rows[offset: offset + per_page] + + # Attach pct_total and is_complete to each row for the renderer + # Re-compute per visible group + sku_pct = {} + for sku, rows in groups.items(): + pct_total = sum( + (r["procent_pret"] or 0) + for r in rows + if r.get("activ") == 1 + ) + sku_pct[sku] = {"pct_total": pct_total, "is_complete": pct_total >= 99.99} + + for row in page_rows: + meta = sku_pct.get(row["sku"], {"pct_total": 0, "is_complete": False}) + row["pct_total"] = meta["pct_total"] + row["is_complete"] = meta["is_complete"] return { - "mappings": rows, + "mappings": page_rows, "total": total, "page": page, "per_page": per_page, - "pages": (total + per_page - 1) // per_page + "pages": (total + per_page - 1) // per_page if total > 0 else 0, + "counts": counts, } def create_mapping(sku: str, codmat: str, cantitate_roa: float = 1, procent_pret: float = 100): - """Create a new mapping.""" + """Create a new mapping. 
Returns dict or raises HTTPException on duplicate.""" if database.pool is None: raise HTTPException(status_code=503, detail="Oracle unavailable") with database.pool.acquire() as conn: with conn.cursor() as cur: + # Check for active duplicate + cur.execute(""" + SELECT COUNT(*) FROM ARTICOLE_TERTI + WHERE sku = :sku AND codmat = :codmat AND NVL(sters, 0) = 0 + """, {"sku": sku, "codmat": codmat}) + if cur.fetchone()[0] > 0: + raise HTTPException(status_code=409, detail="Maparea SKU-CODMAT există deja") + + # Check for soft-deleted record that could be restored + cur.execute(""" + SELECT COUNT(*) FROM ARTICOLE_TERTI + WHERE sku = :sku AND codmat = :codmat AND sters = 1 + """, {"sku": sku, "codmat": codmat}) + if cur.fetchone()[0] > 0: + raise HTTPException( + status_code=409, + detail="Maparea a fost ștearsă anterior", + headers={"X-Can-Restore": "true"} + ) + cur.execute(""" INSERT INTO ARTICOLE_TERTI (sku, codmat, cantitate_roa, procent_pret, activ, sters, data_creare, id_util_creare) VALUES (:sku, :codmat, :cantitate_roa, :procent_pret, 1, 0, SYSDATE, -3) diff --git a/api/app/services/sqlite_service.py b/api/app/services/sqlite_service.py index fe70ef3..ca6e9d0 100644 --- a/api/app/services/sqlite_service.py +++ b/api/app/services/sqlite_service.py @@ -44,7 +44,9 @@ async def update_sync_run(run_id: str, status: str, total_orders: int = 0, async def upsert_order(sync_run_id: str, order_number: str, order_date: str, customer_name: str, status: str, id_comanda: int = None, id_partener: int = None, error_message: str = None, - missing_skus: list = None, items_count: int = 0): + missing_skus: list = None, items_count: int = 0, + shipping_name: str = None, billing_name: str = None, + payment_method: str = None, delivery_method: str = None): """Upsert a single order — one row per order_number, status updated in place.""" db = await get_sqlite() try: @@ -52,8 +54,9 @@ async def upsert_order(sync_run_id: str, order_number: str, order_date: str, INSERT INTO orders 
(order_number, order_date, customer_name, status, id_comanda, id_partener, error_message, missing_skus, items_count, - last_sync_run_id) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + last_sync_run_id, shipping_name, billing_name, + payment_method, delivery_method) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT(order_number) DO UPDATE SET status = excluded.status, error_message = excluded.error_message, @@ -65,11 +68,16 @@ async def upsert_order(sync_run_id: str, order_number: str, order_date: str, THEN orders.times_skipped + 1 ELSE orders.times_skipped END, last_sync_run_id = excluded.last_sync_run_id, + shipping_name = COALESCE(excluded.shipping_name, orders.shipping_name), + billing_name = COALESCE(excluded.billing_name, orders.billing_name), + payment_method = COALESCE(excluded.payment_method, orders.payment_method), + delivery_method = COALESCE(excluded.delivery_method, orders.delivery_method), updated_at = datetime('now') """, (order_number, order_date, customer_name, status, id_comanda, id_partener, error_message, json.dumps(missing_skus) if missing_skus else None, - items_count, sync_run_id)) + items_count, sync_run_id, shipping_name, billing_name, + payment_method, delivery_method)) await db.commit() finally: await db.close() @@ -124,35 +132,52 @@ async def resolve_missing_sku(sku: str): await db.close() -async def get_missing_skus_paginated(page: int = 1, per_page: int = 20, resolved: int = 0): - """Get paginated missing SKUs. resolved=-1 means show all.""" +async def get_missing_skus_paginated(page: int = 1, per_page: int = 20, + resolved: int = 0, search: str = None): + """Get paginated missing SKUs. resolved=-1 means show all. 
+ Optional search filters by sku or product_name (LIKE).""" db = await get_sqlite() try: offset = (page - 1) * per_page - if resolved == -1: - cursor = await db.execute("SELECT COUNT(*) FROM missing_skus") - total = (await cursor.fetchone())[0] - cursor = await db.execute(""" - SELECT sku, product_name, first_seen, resolved, resolved_at, - order_count, order_numbers, customers - FROM missing_skus - ORDER BY resolved ASC, order_count DESC, first_seen DESC - LIMIT ? OFFSET ? - """, (per_page, offset)) - else: - cursor = await db.execute( - "SELECT COUNT(*) FROM missing_skus WHERE resolved = ?", (resolved,) - ) - total = (await cursor.fetchone())[0] - cursor = await db.execute(""" - SELECT sku, product_name, first_seen, resolved, resolved_at, - order_count, order_numbers, customers - FROM missing_skus - WHERE resolved = ? - ORDER BY order_count DESC, first_seen DESC - LIMIT ? OFFSET ? - """, (resolved, per_page, offset)) + # Build WHERE clause parts + where_parts = [] + params_count = [] + params_data = [] + + if resolved != -1: + where_parts.append("resolved = ?") + params_count.append(resolved) + params_data.append(resolved) + + if search: + like = f"%{search}%" + where_parts.append("(LOWER(sku) LIKE LOWER(?) OR LOWER(COALESCE(product_name,'')) LIKE LOWER(?))") + params_count.extend([like, like]) + params_data.extend([like, like]) + + where_clause = ("WHERE " + " AND ".join(where_parts)) if where_parts else "" + + order_clause = ( + "ORDER BY resolved ASC, order_count DESC, first_seen DESC" + if resolved == -1 + else "ORDER BY order_count DESC, first_seen DESC" + ) + + cursor = await db.execute( + f"SELECT COUNT(*) FROM missing_skus {where_clause}", + params_count + ) + total = (await cursor.fetchone())[0] + + cursor = await db.execute(f""" + SELECT sku, product_name, first_seen, resolved, resolved_at, + order_count, order_numbers, customers + FROM missing_skus + {where_clause} + {order_clause} + LIMIT ? OFFSET ? 
+ """, params_data + [per_page, offset]) rows = await cursor.fetchall() @@ -474,8 +499,13 @@ async def get_run_orders_filtered(run_id: str, status_filter: str = "all", async def get_orders(page: int = 1, per_page: int = 50, search: str = "", status_filter: str = "all", sort_by: str = "order_date", sort_dir: str = "desc", - period_days: int = 7): - """Get orders with filters, sorting, and period. period_days=0 means all time.""" + period_days: int = 7, + period_start: str = "", period_end: str = ""): + """Get orders with filters, sorting, and period. + + period_days=0 with period_start/period_end uses custom date range. + period_days=0 without dates means all time. + """ db = await get_sqlite() try: where_clauses = [] @@ -484,6 +514,9 @@ async def get_orders(page: int = 1, per_page: int = 50, if period_days and period_days > 0: where_clauses.append("order_date >= date('now', ?)") params.append(f"-{period_days} days") + elif period_days == 0 and period_start and period_end: + where_clauses.append("order_date BETWEEN ? AND ?") + params.extend([period_start, period_end]) if search: where_clauses.append("(order_number LIKE ? OR customer_name LIKE ?)") diff --git a/api/app/services/sync_service.py b/api/app/services/sync_service.py index e5e1c76..72abc81 100644 --- a/api/app/services/sync_service.py +++ b/api/app/services/sync_service.py @@ -13,28 +13,10 @@ logger = logging.getLogger(__name__) _sync_lock = asyncio.Lock() _current_sync = None # dict with run_id, status, progress info -# SSE subscriber system -_subscribers: list[asyncio.Queue] = [] - # In-memory text log buffer per run _run_logs: dict[str, list[str]] = {} -def subscribe() -> asyncio.Queue: - """Subscribe to sync events. 
Returns a queue that will receive event dicts.""" - q = asyncio.Queue() - _subscribers.append(q) - return q - - -def unsubscribe(q: asyncio.Queue): - """Unsubscribe from sync events.""" - try: - _subscribers.remove(q) - except ValueError: - pass - - def _log_line(run_id: str, message: str): """Append a timestamped line to the in-memory log buffer.""" if run_id not in _run_logs: @@ -51,13 +33,17 @@ def get_run_text_log(run_id: str) -> str | None: return "\n".join(lines) -async def _emit(event: dict): - """Push an event to all subscriber queues.""" - for q in _subscribers: - try: - q.put_nowait(event) - except asyncio.QueueFull: - pass +def _update_progress(phase: str, phase_text: str, current: int = 0, total: int = 0, + counts: dict = None): + """Update _current_sync with progress details for polling.""" + global _current_sync + if _current_sync is None: + return + _current_sync["phase"] = phase + _current_sync["phase_text"] = phase_text + _current_sync["progress_current"] = current + _current_sync["progress_total"] = total + _current_sync["counts"] = counts or {"imported": 0, "skipped": 0, "errors": 0} async def get_sync_status(): @@ -80,7 +66,12 @@ async def prepare_sync(id_pol: int = None, id_sectie: int = None) -> dict: "run_id": run_id, "status": "running", "started_at": datetime.now().isoformat(), - "progress": "Starting..." + "finished_at": None, + "phase": "starting", + "phase_text": "Starting...", + "progress_current": 0, + "progress_total": 0, + "counts": {"imported": 0, "skipped": 0, "errors": 0}, } return {"run_id": run_id, "status": "starting"} @@ -100,11 +91,15 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None "run_id": run_id, "status": "running", "started_at": datetime.now().isoformat(), - "progress": "Reading JSON files..." 
+ "finished_at": None, + "phase": "reading", + "phase_text": "Reading JSON files...", + "progress_current": 0, + "progress_total": 0, + "counts": {"imported": 0, "skipped": 0, "errors": 0}, } - _current_sync["progress"] = "Reading JSON files..." - await _emit({"type": "phase", "run_id": run_id, "message": "Reading JSON files..."}) + _update_progress("reading", "Reading JSON files...") started_dt = datetime.now() _run_logs[run_id] = [ @@ -119,7 +114,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None orders, json_count = order_reader.read_json_orders() orders.sort(key=lambda o: o.date or '') await sqlite_service.create_sync_run(run_id, json_count) - await _emit({"type": "phase", "run_id": run_id, "message": f"Found {len(orders)} orders in {json_count} files"}) + _update_progress("reading", f"Found {len(orders)} orders in {json_count} files", 0, len(orders)) _log_line(run_id, f"Gasite {len(orders)} comenzi in {json_count} fisiere") # Populate web_products catalog from all orders (R4) @@ -131,12 +126,11 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None if not orders: _log_line(run_id, "Nicio comanda gasita.") await sqlite_service.update_sync_run(run_id, "completed", 0, 0, 0, 0) + _update_progress("completed", "No orders found") summary = {"run_id": run_id, "status": "completed", "message": "No orders found", "json_files": json_count} - await _emit({"type": "completed", "run_id": run_id, "summary": summary}) return summary - _current_sync["progress"] = f"Validating {len(orders)} orders..." 
- await _emit({"type": "phase", "run_id": run_id, "message": f"Validating {len(orders)} orders..."}) + _update_progress("validation", f"Validating {len(orders)} orders...", 0, len(orders)) # Step 2a: Find new orders (not yet in Oracle) all_order_numbers = [o.number for o in orders] @@ -149,7 +143,8 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None validation = await asyncio.to_thread(validation_service.validate_skus, all_skus) importable, skipped = validation_service.classify_orders(orders, validation) - await _emit({"type": "phase", "run_id": run_id, "message": f"{len(importable)} importable, {len(skipped)} skipped (missing SKUs)"}) + _update_progress("validation", f"{len(importable)} importable, {len(skipped)} skipped (missing SKUs)", + 0, len(importable)) _log_line(run_id, f"Validare SKU-uri: {len(importable)} importabile, {len(skipped)} nemapate") # Step 2c: Build SKU context from skipped orders @@ -189,8 +184,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None logger.info(f"Sync params: ID_POL={id_pol}, ID_SECTIE={id_sectie}") _log_line(run_id, f"Parametri import: ID_POL={id_pol}, ID_SECTIE={id_sectie}") if id_pol and importable: - _current_sync["progress"] = "Validating prices..." 
- await _emit({"type": "phase", "run_id": run_id, "message": "Validating prices..."}) + _update_progress("validation", "Validating prices...", 0, len(importable)) _log_line(run_id, "Validare preturi...") # Gather all CODMATs from importable orders all_codmats = set() @@ -216,10 +210,21 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None price_result["missing_price"], id_pol ) - # Step 3: Record skipped orders + emit events + store items + # Step 3: Record skipped orders + store items + skipped_count = 0 for order, missing_skus in skipped: - customer = order.billing.company_name or \ - f"{order.billing.firstname} {order.billing.lastname}" + skipped_count += 1 + # Derive shipping / billing names + shipping_name = "" + if order.shipping: + shipping_name = f"{getattr(order.shipping, 'firstname', '') or ''} {getattr(order.shipping, 'lastname', '') or ''}".strip() + billing_name = f"{getattr(order.billing, 'firstname', '') or ''} {getattr(order.billing, 'lastname', '') or ''}".strip() + if not shipping_name: + shipping_name = billing_name + customer = shipping_name or order.billing.company_name or billing_name + payment_method = getattr(order, 'payment_name', None) or None + delivery_method = getattr(order, 'delivery_name', None) or None + await sqlite_service.upsert_order( sync_run_id=run_id, order_number=order.number, @@ -227,7 +232,11 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None customer_name=customer, status="SKIPPED", missing_skus=missing_skus, - items_count=len(order.items) + items_count=len(order.items), + shipping_name=shipping_name, + billing_name=billing_name, + payment_method=payment_method, + delivery_method=delivery_method, ) await sqlite_service.add_sync_run_order(run_id, order.number, "SKIPPED") # Store order items with mapping status (R9) @@ -243,28 +252,35 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None }) await 
sqlite_service.add_order_items(order.number, order_items_data) _log_line(run_id, f"#{order.number} [{order.date or '?'}] {customer} → OMIS (lipsa: {', '.join(missing_skus)})") - await _emit({ - "type": "order_result", "run_id": run_id, - "order_number": order.number, "customer_name": customer, - "order_date": order.date, - "status": "SKIPPED", "missing_skus": missing_skus, - "items_count": len(order.items), "progress": f"0/{len(importable)}" - }) + _update_progress("skipped", f"Skipped {skipped_count}/{len(skipped)}: #{order.number} {customer}", + 0, len(importable), + {"imported": 0, "skipped": skipped_count, "errors": 0}) # Step 4: Import valid orders imported_count = 0 error_count = 0 for i, order in enumerate(importable): - progress_str = f"{i+1}/{len(importable)}" - _current_sync["progress"] = f"Importing {progress_str}: #{order.number}" + # Derive shipping / billing names + shipping_name = "" + if order.shipping: + shipping_name = f"{getattr(order.shipping, 'firstname', '') or ''} {getattr(order.shipping, 'lastname', '') or ''}".strip() + billing_name = f"{getattr(order.billing, 'firstname', '') or ''} {getattr(order.billing, 'lastname', '') or ''}".strip() + if not shipping_name: + shipping_name = billing_name + customer = shipping_name or order.billing.company_name or billing_name + payment_method = getattr(order, 'payment_name', None) or None + delivery_method = getattr(order, 'delivery_name', None) or None + + _update_progress("import", + f"Import {i+1}/{len(importable)}: #{order.number} {customer}", + i + 1, len(importable), + {"imported": imported_count, "skipped": len(skipped), "errors": error_count}) result = await asyncio.to_thread( import_service.import_single_order, order, id_pol=id_pol, id_sectie=id_sectie ) - customer = order.billing.company_name or \ - f"{order.billing.firstname} {order.billing.lastname}" # Build order items data for storage (R9) order_items_data = [] @@ -287,7 +303,11 @@ async def run_sync(id_pol: int = None, id_sectie: int = 
None, run_id: str = None status="IMPORTED", id_comanda=result["id_comanda"], id_partener=result["id_partener"], - items_count=len(order.items) + items_count=len(order.items), + shipping_name=shipping_name, + billing_name=billing_name, + payment_method=payment_method, + delivery_method=delivery_method, ) await sqlite_service.add_sync_run_order(run_id, order.number, "IMPORTED") # Store ROA address IDs (R9) @@ -298,13 +318,6 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None ) await sqlite_service.add_order_items(order.number, order_items_data) _log_line(run_id, f"#{order.number} [{order.date or '?'}] {customer} → IMPORTAT (ID: {result['id_comanda']})") - await _emit({ - "type": "order_result", "run_id": run_id, - "order_number": order.number, "customer_name": customer, - "order_date": order.date, - "status": "IMPORTED", "items_count": len(order.items), - "id_comanda": result["id_comanda"], "progress": progress_str - }) else: error_count += 1 await sqlite_service.upsert_order( @@ -315,18 +328,15 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None status="ERROR", id_partener=result.get("id_partener"), error_message=result["error"], - items_count=len(order.items) + items_count=len(order.items), + shipping_name=shipping_name, + billing_name=billing_name, + payment_method=payment_method, + delivery_method=delivery_method, ) await sqlite_service.add_sync_run_order(run_id, order.number, "ERROR") await sqlite_service.add_order_items(order.number, order_items_data) _log_line(run_id, f"#{order.number} [{order.date or '?'}] {customer} → EROARE: {result['error']}") - await _emit({ - "type": "order_result", "run_id": run_id, - "order_number": order.number, "customer_name": customer, - "order_date": order.date, - "status": "ERROR", "error_message": result["error"], - "items_count": len(order.items), "progress": progress_str - }) # Safety: stop if too many errors if error_count > 10: @@ -351,11 +361,18 @@ async def 
run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None "missing_skus": len(validation["missing"]) } + _update_progress("completed", + f"Completed: {imported_count} imported, {len(skipped)} skipped, {error_count} errors", + len(importable), len(importable), + {"imported": imported_count, "skipped": len(skipped), "errors": error_count}) + if _current_sync: + _current_sync["status"] = status + _current_sync["finished_at"] = datetime.now().isoformat() + logger.info( f"Sync {run_id} completed: {imported_count} imported, " f"{len(skipped)} skipped, {error_count} errors" ) - await _emit({"type": "completed", "run_id": run_id, "summary": summary}) duration = (datetime.now() - started_dt).total_seconds() _log_line(run_id, "") @@ -367,8 +384,10 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None logger.error(f"Sync {run_id} failed: {e}") _log_line(run_id, f"EROARE FATALA: {e}") await sqlite_service.update_sync_run(run_id, "failed", 0, 0, 0, 1, error_message=str(e)) - _current_sync["error"] = str(e) - await _emit({"type": "failed", "run_id": run_id, "error": str(e)}) + if _current_sync: + _current_sync["status"] = "failed" + _current_sync["finished_at"] = datetime.now().isoformat() + _current_sync["error"] = str(e) return {"run_id": run_id, "status": "failed", "error": str(e)} finally: # Keep _current_sync for 10 seconds so status endpoint can show final result diff --git a/api/app/static/css/style.css b/api/app/static/css/style.css index 31789c2..12cd0f8 100644 --- a/api/app/static/css/style.css +++ b/api/app/static/css/style.css @@ -302,3 +302,191 @@ tr.mapping-deleted td { .cursor-pointer { cursor: pointer; } + +/* ── Typography scale ────────────────────────────── */ +.text-header { font-size: 1.25rem; font-weight: 600; } +.text-card-head { font-size: 1rem; font-weight: 600; } +.text-body { font-size: 0.8125rem; } +.text-badge { font-size: 0.75rem; } +.text-label { font-size: 0.6875rem; } + +/* ── Filter bar — shared across 
dashboard, mappings, missing_skus pages ── */ +.filter-bar { + display: flex; + align-items: center; + gap: 0.5rem; + flex-wrap: wrap; + padding: 0.625rem 0; +} +.filter-pill { + display: inline-flex; + align-items: center; + gap: 0.3rem; + padding: 0.25rem 0.625rem; + border: 1px solid #d1d5db; + border-radius: 999px; + background: #fff; + font-size: 0.8125rem; + cursor: pointer; + transition: background 0.15s, border-color 0.15s; + white-space: nowrap; +} +.filter-pill:hover { background: #f3f4f6; } +.filter-pill.active { + background: #1d4ed8; + border-color: #1d4ed8; + color: #fff; +} +.filter-pill.active .filter-count { background: rgba(255,255,255,0.25); color: #fff; } +.filter-count { + display: inline-block; + min-width: 1.25rem; + padding: 0 0.3rem; + border-radius: 999px; + background: #e5e7eb; + font-size: 0.7rem; + font-weight: 600; + text-align: center; + line-height: 1.4; +} + +/* ── Search input (used in filter bars) ─────────── */ +.search-input { + margin-left: auto; + padding: 0.25rem 0.625rem; + border: 1px solid #d1d5db; + border-radius: 6px; + font-size: 0.8125rem; + outline: none; + min-width: 180px; +} +.search-input:focus { border-color: #1d4ed8; } + +/* ── Tooltip for Client/Cont ─────────────────────── */ +.tooltip-cont { + position: relative; + cursor: default; +} +.tooltip-cont::after { + content: attr(data-tooltip); + position: absolute; + bottom: 125%; + left: 50%; + transform: translateX(-50%); + background: #1f2937; + color: #f9fafb; + font-size: 0.75rem; + padding: 0.3rem 0.6rem; + border-radius: 4px; + white-space: nowrap; + pointer-events: none; + opacity: 0; + transition: opacity 0.15s; + z-index: 10; +} +.tooltip-cont:hover::after { opacity: 1; } + +/* ── Sync card ───────────────────────────────────── */ +.sync-card { + background: #fff; + border: 1px solid #e5e7eb; + border-radius: 8px; + overflow: hidden; + margin-bottom: 1rem; +} +.sync-card-controls { + display: flex; + align-items: center; + gap: 0.75rem; + padding: 
0.75rem 1rem; + flex-wrap: wrap; +} +.sync-card-divider { + height: 1px; + background: #e5e7eb; + margin: 0; +} +.sync-card-info { + display: flex; + align-items: center; + gap: 1rem; + padding: 0.5rem 1rem; + font-size: 0.8125rem; + color: #6b7280; + cursor: pointer; + transition: background 0.12s; +} +.sync-card-info:hover { background: #f9fafb; } +.sync-card-progress { + display: flex; + align-items: center; + gap: 0.5rem; + padding: 0.4rem 1rem; + background: #eff6ff; + font-size: 0.8125rem; + color: #1d4ed8; + border-top: 1px solid #dbeafe; +} + +/* ── Pulsing live dot ────────────────────────────── */ +.sync-live-dot { + display: inline-block; + width: 8px; + height: 8px; + border-radius: 50%; + background: #3b82f6; + animation: pulse-dot 1.2s ease-in-out infinite; + flex-shrink: 0; +} +@keyframes pulse-dot { + 0%, 100% { opacity: 1; transform: scale(1); } + 50% { opacity: 0.4; transform: scale(0.75); } +} + +/* ── Status dot (idle/running/completed/failed) ──── */ +.sync-status-dot { + display: inline-block; + width: 10px; + height: 10px; + border-radius: 50%; + flex-shrink: 0; +} +.sync-status-dot.idle { background: #9ca3af; } +.sync-status-dot.running { background: #3b82f6; animation: pulse-dot 1.2s ease-in-out infinite; } +.sync-status-dot.completed { background: #10b981; } +.sync-status-dot.failed { background: #ef4444; } + +/* ── Custom period range inputs ──────────────────── */ +.period-custom-range { + display: none; + gap: 0.375rem; + align-items: center; + font-size: 0.8125rem; +} +.period-custom-range.visible { display: flex; } + +/* ── Compact button ──────────────────────────────── */ +.btn-compact { + padding: 0.3rem 0.75rem; + font-size: 0.8125rem; +} + +/* ── Compact select ──────────────────────────────── */ +.select-compact { + padding: 0.25rem 0.5rem; + font-size: 0.8125rem; + border: 1px solid #d1d5db; + border-radius: 6px; + background: #fff; + cursor: pointer; +} + +/* ── Result banner ───────────────────────────────── */ 
+.result-banner { + padding: 0.4rem 0.75rem; + border-radius: 6px; + font-size: 0.8125rem; + background: #d1fae5; + color: #065f46; + border: 1px solid #6ee7b7; +} diff --git a/api/app/static/js/dashboard.js b/api/app/static/js/dashboard.js index 2ab12f1..3ccc599 100644 --- a/api/app/static/js/dashboard.js +++ b/api/app/static/js/dashboard.js @@ -1,138 +1,215 @@ -let refreshInterval = null; +// ── State ───────────────────────────────────────── let dashPage = 1; -let dashFilter = 'all'; -let dashSearch = ''; +let dashPerPage = 50; let dashSortCol = 'order_date'; let dashSortDir = 'desc'; let dashSearchTimeout = null; -let dashPeriodDays = 7; let currentQmSku = ''; let currentQmOrderNumber = ''; let qmAcTimeout = null; -let syncEventSource = null; + +// Sync polling state +let _pollInterval = null; +let _lastSyncStatus = null; +let _lastRunId = null; + +// ── Init ────────────────────────────────────────── document.addEventListener('DOMContentLoaded', () => { loadSchedulerStatus(); - loadSyncStatus(); - loadLastSync(); loadDashOrders(); - refreshInterval = setInterval(() => { - loadSyncStatus(); - }, 10000); + startSyncPolling(); + wireFilterBar(); }); -// ── Sync Status ────────────────────────────────── +// ── Smart Sync Polling ──────────────────────────── -async function loadSyncStatus() { +function startSyncPolling() { + if (_pollInterval) clearInterval(_pollInterval); + _pollInterval = setInterval(pollSyncStatus, 30000); + pollSyncStatus(); // immediate first call +} + +async function pollSyncStatus() { try { - const res = await fetch('/api/sync/status'); - const data = await res.json(); - - const badge = document.getElementById('syncStatusBadge'); - const status = data.status || 'idle'; - badge.textContent = status; - badge.className = 'badge ' + (status === 'running' ? 'bg-primary' : status === 'failed' ? 
'bg-danger' : 'bg-secondary'); - - if (status === 'running') { - document.getElementById('btnStartSync').classList.add('d-none'); - document.getElementById('btnStopSync').classList.remove('d-none'); - document.getElementById('syncProgressText').textContent = data.progress || 'Running...'; - } else { - document.getElementById('btnStartSync').classList.remove('d-none'); - document.getElementById('btnStopSync').classList.add('d-none'); - - const stats = data.stats || {}; - if (stats.last_run) { - const lr = stats.last_run; - const started = lr.started_at ? new Date(lr.started_at).toLocaleString('ro-RO') : ''; - document.getElementById('syncProgressText').textContent = - `Ultimul: ${started} | ${lr.imported || 0} ok, ${lr.skipped || 0} nemapate, ${lr.errors || 0} erori`; - } else { - document.getElementById('syncProgressText').textContent = ''; - } + const data = await fetchJSON('/api/sync/status'); + updateSyncPanel(data); + const isRunning = data.status === 'running'; + const wasRunning = _lastSyncStatus === 'running'; + if (isRunning && !wasRunning) { + // Switched to running — speed up polling + clearInterval(_pollInterval); + _pollInterval = setInterval(pollSyncStatus, 3000); + } else if (!isRunning && wasRunning) { + // Sync just completed — slow down and refresh orders + clearInterval(_pollInterval); + _pollInterval = setInterval(pollSyncStatus, 30000); + loadDashOrders(); } - } catch (err) { - console.error('loadSyncStatus error:', err); + _lastSyncStatus = data.status; + } catch (e) { + console.warn('Sync status poll failed:', e); } } -// ── Last Sync Summary Card ─────────────────────── +function updateSyncPanel(data) { + const dot = document.getElementById('syncStatusDot'); + const txt = document.getElementById('syncStatusText'); + const progressArea = document.getElementById('syncProgressArea'); + const progressText = document.getElementById('syncProgressText'); + const startBtn = document.getElementById('syncStartBtn'); -async function loadLastSync() { + 
if (dot) { + dot.className = 'sync-status-dot ' + (data.status || 'idle'); + } + const statusLabels = { running: 'A ruleaza...', idle: 'Inactiv', completed: 'Finalizat', failed: 'Eroare' }; + if (txt) txt.textContent = statusLabels[data.status] || data.status || 'Inactiv'; + if (startBtn) startBtn.disabled = data.status === 'running'; + + // Live progress area + if (progressArea) { + progressArea.style.display = data.status === 'running' ? 'flex' : 'none'; + } + if (progressText && data.phase_text) { + progressText.textContent = data.phase_text; + } + + // Last run info + const lr = data.last_run; + if (lr) { + _lastRunId = lr.run_id; + const d = document.getElementById('lastSyncDate'); + const dur = document.getElementById('lastSyncDuration'); + const cnt = document.getElementById('lastSyncCounts'); + const st = document.getElementById('lastSyncStatus'); + if (d) d.textContent = lr.started_at ? lr.started_at.replace('T', ' ').slice(0, 16) : '\u2014'; + if (dur) dur.textContent = lr.duration_seconds ? Math.round(lr.duration_seconds) + 's' : '\u2014'; + if (cnt) cnt.textContent = '\u2191' + (lr.imported || 0) + ' \u2298' + (lr.skipped || 0) + ' \u2715' + (lr.errors || 0); + if (st) { + st.textContent = lr.status === 'completed' ? '\u2713' : '\u2715'; + st.style.color = lr.status === 'completed' ? 
'#10b981' : '#ef4444'; + } + } +} + +// Wire last-sync-row click → journal +document.addEventListener('DOMContentLoaded', () => { + document.getElementById('lastSyncRow')?.addEventListener('click', () => { + if (_lastRunId) window.location = '/logs?run=' + _lastRunId; + }); + document.getElementById('lastSyncRow')?.addEventListener('keydown', (e) => { + if ((e.key === 'Enter' || e.key === ' ') && _lastRunId) { + window.location = '/logs?run=' + _lastRunId; + } + }); +}); + +// ── Sync Controls ───────────────────────────────── + +async function startSync() { try { - const res = await fetch('/api/sync/history?per_page=1'); + const res = await fetch('/api/sync/start', { method: 'POST' }); const data = await res.json(); - const runs = data.runs || []; - - if (runs.length === 0) { - document.getElementById('lastSyncDate').textContent = '-'; + if (data.error) { + alert(data.error); return; } - - const r = runs[0]; - document.getElementById('lastSyncDate').textContent = r.started_at - ? new Date(r.started_at).toLocaleString('ro-RO', {day:'2-digit',month:'2-digit',hour:'2-digit',minute:'2-digit'}) - : '-'; - - const statusClass = r.status === 'completed' ? 'bg-success' : r.status === 'running' ? 'bg-primary' : 'bg-danger'; - document.getElementById('lastSyncStatus').innerHTML = `${esc(r.status)}`; - document.getElementById('lastSyncImported').textContent = r.imported || 0; - document.getElementById('lastSyncSkipped').textContent = r.skipped || 0; - document.getElementById('lastSyncErrors').textContent = r.errors || 0; - - if (r.started_at && r.finished_at) { - const sec = Math.round((new Date(r.finished_at) - new Date(r.started_at)) / 1000); - document.getElementById('lastSyncDuration').textContent = sec < 60 ? 
`${sec}s` : `${Math.floor(sec/60)}m ${sec%60}s`; - } else { - document.getElementById('lastSyncDuration').textContent = '-'; - } + // Polling will detect the running state — just speed it up immediately + pollSyncStatus(); } catch (err) { - console.error('loadLastSync error:', err); + alert('Eroare: ' + err.message); } } -// ── Dashboard Orders Table ─────────────────────── - -function debounceDashSearch() { - clearTimeout(dashSearchTimeout); - dashSearchTimeout = setTimeout(() => { - dashSearch = document.getElementById('dashSearchInput').value; - dashPage = 1; - loadDashOrders(); - }, 300); +async function stopSync() { + try { + await fetch('/api/sync/stop', { method: 'POST' }); + pollSyncStatus(); + } catch (err) { + alert('Eroare: ' + err.message); + } } -function dashFilterOrders(filter) { - dashFilter = filter; - dashPage = 1; +async function toggleScheduler() { + const enabled = document.getElementById('schedulerToggle').checked; + const interval = parseInt(document.getElementById('schedulerInterval').value) || 10; + try { + await fetch('/api/sync/schedule', { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ enabled, interval_minutes: interval }) + }); + } catch (err) { + alert('Eroare scheduler: ' + err.message); + } +} - // Update button styles - const colorMap = { - 'all': 'primary', - 'IMPORTED': 'success', - 'SKIPPED': 'warning', - 'ERROR': 'danger', - 'UNINVOICED': 'info' - }; - document.querySelectorAll('#dashFilterBtns button').forEach(btn => { - const text = btn.textContent.trim().split(' ')[0]; - let btnFilter = 'all'; - if (text === 'Importate') btnFilter = 'IMPORTED'; - else if (text === 'Omise') btnFilter = 'SKIPPED'; - else if (text === 'Erori') btnFilter = 'ERROR'; - else if (text === 'Nefacturate') btnFilter = 'UNINVOICED'; +async function updateSchedulerInterval() { + const enabled = document.getElementById('schedulerToggle').checked; + if (enabled) { + await toggleScheduler(); + } +} - const color 
= colorMap[btnFilter] || 'primary'; - if (btnFilter === filter) { - btn.className = `btn btn-sm btn-${color}`; +async function loadSchedulerStatus() { + try { + const res = await fetch('/api/sync/schedule'); + const data = await res.json(); + document.getElementById('schedulerToggle').checked = data.enabled || false; + if (data.interval_minutes) { + document.getElementById('schedulerInterval').value = data.interval_minutes; + } + } catch (err) { + console.error('loadSchedulerStatus error:', err); + } +} + +// ── Filter Bar wiring ───────────────────────────── + +function wireFilterBar() { + // Period dropdown + document.getElementById('periodSelect')?.addEventListener('change', function () { + const cr = document.getElementById('customRangeInputs'); + if (this.value === 'custom') { + cr?.classList.add('visible'); } else { - btn.className = `btn btn-sm btn-outline-${color}`; + cr?.classList.remove('visible'); + dashPage = 1; + loadDashOrders(); } }); - loadDashOrders(); + // Custom range inputs + ['periodStart', 'periodEnd'].forEach(id => { + document.getElementById(id)?.addEventListener('change', () => { + const s = document.getElementById('periodStart')?.value; + const e = document.getElementById('periodEnd')?.value; + if (s && e) { dashPage = 1; loadDashOrders(); } + }); + }); + + // Status pills + document.querySelectorAll('.filter-pill[data-status]').forEach(btn => { + btn.addEventListener('click', function () { + document.querySelectorAll('.filter-pill[data-status]').forEach(b => b.classList.remove('active')); + this.classList.add('active'); + dashPage = 1; + loadDashOrders(); + }); + }); + + // Search — 300ms debounce + document.getElementById('orderSearch')?.addEventListener('input', () => { + clearTimeout(dashSearchTimeout); + dashSearchTimeout = setTimeout(() => { + dashPage = 1; + loadDashOrders(); + }, 300); + }); } +// ── Dashboard Orders Table ──────────────────────── + function dashSortBy(col) { if (dashSortCol === col) { dashSortDir = dashSortDir === 
'asc' ? 'desc' : 'asc'; @@ -140,8 +217,6 @@ function dashSortBy(col) { dashSortCol = col; dashSortDir = 'asc'; } - // Update sort icons - document.querySelectorAll('#dashOrdersBody').forEach(() => {}); // noop document.querySelectorAll('.sort-icon').forEach(span => { const c = span.dataset.col; span.textContent = c === dashSortCol ? (dashSortDir === 'asc' ? '\u2191' : '\u2193') : ''; @@ -150,39 +225,45 @@ function dashSortBy(col) { loadDashOrders(); } -function dashSetPeriod(days) { - dashPeriodDays = days; - dashPage = 1; - document.querySelectorAll('#dashPeriodBtns button').forEach(btn => { - const val = parseInt(btn.dataset.days); - btn.className = val === days - ? 'btn btn-sm btn-secondary' - : 'btn btn-sm btn-outline-secondary'; - }); - loadDashOrders(); -} - async function loadDashOrders() { - const params = new URLSearchParams({ - page: dashPage, - per_page: 50, - search: dashSearch, - status: dashFilter, - sort_by: dashSortCol, - sort_dir: dashSortDir, - period_days: dashPeriodDays - }); + const periodVal = document.getElementById('periodSelect')?.value || '7'; + const params = new URLSearchParams(); + + if (periodVal === 'custom') { + const s = document.getElementById('periodStart')?.value; + const e = document.getElementById('periodEnd')?.value; + if (s && e) { + params.set('period_start', s); + params.set('period_end', e); + params.set('period_days', '0'); + } + } else { + params.set('period_days', periodVal); + } + + const activeStatus = document.querySelector('.filter-pill.active')?.dataset.status; + if (activeStatus && activeStatus !== 'all') params.set('status', activeStatus); + + const search = document.getElementById('orderSearch')?.value?.trim(); + if (search) params.set('search', search); + + params.set('page', dashPage); + params.set('per_page', dashPerPage); + params.set('sort_by', dashSortCol); + params.set('sort_dir', dashSortDir); try { const res = await fetch(`/api/dashboard/orders?${params}`); const data = await res.json(); - const counts 
= data.counts || {}; - document.getElementById('dashCountAll').textContent = counts.total || 0; - document.getElementById('dashCountImported').textContent = counts.imported || 0; - document.getElementById('dashCountSkipped').textContent = counts.skipped || 0; - document.getElementById('dashCountError').textContent = counts.error || 0; - document.getElementById('dashCountUninvoiced').textContent = counts.uninvoiced || 0; + // Update filter-pill badge counts + const c = data.counts || {}; + const el = (id) => document.getElementById(id); + if (el('cntAll')) el('cntAll').textContent = c.total || 0; + if (el('cntImp')) el('cntImp').textContent = c.imported || 0; + if (el('cntSkip')) el('cntSkip').textContent = c.skipped || 0; + if (el('cntErr')) el('cntErr').textContent = c.error || c.errors || 0; + if (el('cntNef')) el('cntNef').textContent = c.uninvoiced || c.nefacturate || 0; const tbody = document.getElementById('dashOrdersBody'); const orders = data.orders || []; @@ -212,7 +293,7 @@ async function loadDashOrders() { return ` ${esc(o.order_number)} ${dateStr} - ${esc(o.customer_name)} + ${renderClientCell(o)} ${o.items_count || 0} ${statusBadge} ${o.id_comanda || '-'} @@ -223,19 +304,23 @@ async function loadDashOrders() { } // Pagination - const totalPages = data.pages || 1; - document.getElementById('dashPageInfo').textContent = `${data.total || 0} comenzi | Pagina ${dashPage} din ${totalPages}`; + const pag = data.pagination || {}; + const totalPages = pag.total_pages || data.pages || 1; + const totalOrders = (data.counts || {}).total || data.total || 0; + const pageInfo = `${totalOrders} comenzi | Pagina ${dashPage} din ${totalPages}`; + document.getElementById('dashPageInfo').textContent = pageInfo; + const pagInfoTop = document.getElementById('dashPageInfoTop'); + if (pagInfoTop) pagInfoTop.textContent = pageInfo; + const pagHtml = totalPages > 1 ? 
` + + ${dashPage} / ${totalPages} + + ` : ''; const pagDiv = document.getElementById('dashPagination'); - if (totalPages > 1) { - pagDiv.innerHTML = ` - - ${dashPage} / ${totalPages} - - `; - } else { - pagDiv.innerHTML = ''; - } + if (pagDiv) pagDiv.innerHTML = pagHtml; + const pagDivTop = document.getElementById('dashPaginationTop'); + if (pagDivTop) pagDivTop.innerHTML = pagHtml; // Update sort icons document.querySelectorAll('.sort-icon').forEach(span => { @@ -253,7 +338,44 @@ function dashGoPage(p) { loadDashOrders(); } -// ── Helper functions ───────────────────────────── +function dashChangePerPage(val) { + dashPerPage = parseInt(val) || 50; + dashPage = 1; + loadDashOrders(); +} + +// ── Client cell with Cont tooltip (Task F4) ─────── + +function renderClientCell(order) { + const shipping = (order.shipping_name || order.customer_name || '').trim(); + const billing = (order.billing_name || '').trim(); + const isDiff = order.is_different_person && billing && shipping !== billing; + if (isDiff) { + return `${escHtml(shipping)} `; + } + return `${escHtml(shipping || billing || '\u2014')}`; +} + +// ── Helper functions ────────────────────────────── + +async function fetchJSON(url) { + const res = await fetch(url); + if (!res.ok) throw new Error(`HTTP ${res.status}`); + return res.json(); +} + +function escHtml(s) { + if (s == null) return ''; + return String(s) + .replace(/&/g, '&amp;') + .replace(/</g, '&lt;') + .replace(/>/g, '&gt;') + .replace(/"/g, '&quot;') + .replace(/'/g, '&#39;'); +} + +// Alias kept for backward compat with inline handlers in modal +function esc(s) { return escHtml(s); } function fmtDate(dateStr) { if (!dateStr) return '-'; @@ -289,7 +411,7 @@ function renderCodmatCell(item) { ).join(''); } -// ── Order Detail Modal ─────────────────────────── +// ── Order Detail Modal ──────────────────────────── async function openDashOrderDetail(orderNumber) { document.getElementById('detailOrderNumber').textContent = '#' + orderNumber; @@ -367,7 +489,7 @@ async function
openDashOrderDetail(orderNumber) { } } -// ── Quick Map Modal ────────────────────────────── +// ── Quick Map Modal ─────────────────────────────── function openQuickMap(sku, productName, orderNumber) { currentQmSku = sku; @@ -435,7 +557,7 @@ async function qmAutocomplete(input, dropdown, selectedEl) { dropdown.innerHTML = data.results.map(r => `
- ${esc(r.codmat)}${esc(r.denumire)}${r.um ? ` (${esc(r.um)})` : ''} + ${esc(r.codmat)}${esc(r.denumire)}${r.um ? ` (${esc(r.um)})` : ''}
` ).join(''); dropdown.classList.remove('d-none'); @@ -500,126 +622,3 @@ async function saveQuickMapping() { alert('Eroare: ' + err.message); } } - -// ── Sync Controls ──────────────────────────────── - -async function startSync() { - try { - const res = await fetch('/api/sync/start', { method: 'POST' }); - const data = await res.json(); - if (data.error) { - alert(data.error); - return; - } - if (data.run_id) { - const banner = document.getElementById('syncStartedBanner'); - const link = document.getElementById('syncRunLink'); - if (banner && link) { - link.href = '/logs?run=' + encodeURIComponent(data.run_id); - banner.classList.remove('d-none'); - } - // Subscribe to SSE for live progress + auto-refresh on completion - listenToSyncStream(data.run_id); - } - loadSyncStatus(); - } catch (err) { - alert('Eroare: ' + err.message); - } -} - -function listenToSyncStream(runId) { - // Close any previous SSE connection - if (syncEventSource) { syncEventSource.close(); syncEventSource = null; } - - syncEventSource = new EventSource('/api/sync/stream'); - - syncEventSource.onmessage = (event) => { - try { - const data = JSON.parse(event.data); - - if (data.type === 'phase') { - document.getElementById('syncProgressText').textContent = data.message || ''; - } - - if (data.type === 'order_result') { - // Update progress text with current order info - const status = data.status === 'IMPORTED' ? 'OK' : data.status === 'SKIPPED' ? 
'OMIS' : 'ERR'; - document.getElementById('syncProgressText').textContent = - `[${data.progress || ''}] #${data.order_number} ${data.customer_name || ''} → ${status}`; - } - - if (data.type === 'completed' || data.type === 'failed') { - syncEventSource.close(); - syncEventSource = null; - // Refresh all dashboard sections - loadLastSync(); - loadDashOrders(); - loadSyncStatus(); - // Hide banner after 5s - setTimeout(() => { - document.getElementById('syncStartedBanner')?.classList.add('d-none'); - }, 5000); - } - } catch (e) { - console.error('SSE parse error:', e); - } - }; - - syncEventSource.onerror = () => { - syncEventSource.close(); - syncEventSource = null; - // Refresh anyway — sync may have finished - loadLastSync(); - loadDashOrders(); - loadSyncStatus(); - }; -} - -async function stopSync() { - try { - await fetch('/api/sync/stop', { method: 'POST' }); - loadSyncStatus(); - } catch (err) { - alert('Eroare: ' + err.message); - } -} - -async function toggleScheduler() { - const enabled = document.getElementById('schedulerToggle').checked; - const interval = parseInt(document.getElementById('schedulerInterval').value) || 5; - - try { - await fetch('/api/sync/schedule', { - method: 'PUT', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ enabled, interval_minutes: interval }) - }); - } catch (err) { - alert('Eroare scheduler: ' + err.message); - } -} - -async function updateSchedulerInterval() { - const enabled = document.getElementById('schedulerToggle').checked; - if (enabled) { - await toggleScheduler(); - } -} - -async function loadSchedulerStatus() { - try { - const res = await fetch('/api/sync/schedule'); - const data = await res.json(); - document.getElementById('schedulerToggle').checked = data.enabled || false; - if (data.interval_minutes) { - document.getElementById('schedulerInterval').value = data.interval_minutes; - } - } catch (err) { - console.error('loadSchedulerStatus error:', err); - } -} - -function esc(s) { - if 
(s == null) return ''; - return String(s).replace(/&/g, '&').replace(//g, '>').replace(/"/g, '"').replace(/'/g, '''); -} diff --git a/api/app/static/js/mappings.js b/api/app/static/js/mappings.js index 55672e3..95bbfec 100644 --- a/api/app/static/js/mappings.js +++ b/api/app/static/js/mappings.js @@ -4,12 +4,14 @@ let searchTimeout = null; let sortColumn = 'sku'; let sortDirection = 'asc'; let editingMapping = null; // {sku, codmat} when editing +let pctFilter = 'all'; // Load on page ready document.addEventListener('DOMContentLoaded', () => { loadMappings(); initAddModal(); initDeleteModal(); + initPctFilterPills(); }); function debounceSearch() { @@ -45,6 +47,30 @@ function updateSortIcons() { }); } +// ── Pct Filter Pills ───────────────────────────── + +function initPctFilterPills() { + document.querySelectorAll('.filter-pill[data-pct]').forEach(btn => { + btn.addEventListener('click', function() { + document.querySelectorAll('.filter-pill[data-pct]').forEach(b => b.classList.remove('active')); + this.classList.add('active'); + pctFilter = this.dataset.pct; + currentPage = 1; + loadMappings(); + }); + }); +} + +function updatePctCounts(counts) { + if (!counts) return; + const elAll = document.getElementById('mCntAll'); + const elComplete = document.getElementById('mCntComplete'); + const elIncomplete = document.getElementById('mCntIncomplete'); + if (elAll) elAll.textContent = counts.total || 0; + if (elComplete) elComplete.textContent = counts.complete || 0; + if (elIncomplete) elIncomplete.textContent = counts.incomplete || 0; +} + // ── Load & Render ──────────────────────────────── async function loadMappings() { @@ -58,6 +84,7 @@ async function loadMappings() { sort_dir: sortDirection }); if (showDeleted) params.set('show_deleted', 'true'); + if (pctFilter && pctFilter !== 'all') params.set('pct_filter', pctFilter); try { const res = await fetch(`/api/mappings?${params}`); @@ -71,6 +98,7 @@ async function loadMappings() { mappings = mappings.filter(m => 
m.activ || m.sters); } + updatePctCounts(data.counts); renderTable(mappings, showDeleted); renderPagination(data); updateSortIcons(); @@ -111,7 +139,17 @@ function renderTable(mappings, showDeleted) { let skuCell, productCell; if (isNewGroup) { const badge = isMulti ? ` Set (${skuGroupCounts[m.sku]})` : ''; - skuCell = `${esc(m.sku)}${badge}`; + // Percentage total badge + let pctBadge = ''; + if (m.pct_total !== undefined) { + if (m.is_complete) { + pctBadge = ` ✓ 100%`; + } else { + const pctVal = typeof m.pct_total === 'number' ? m.pct_total.toFixed(0) : m.pct_total; + pctBadge = ` ⚠ ${pctVal}%`; + } + } + skuCell = `${esc(m.sku)}${badge}${pctBadge}`; productCell = `${esc(m.product_name || '-')}`; } else { skuCell = ''; @@ -361,6 +399,8 @@ async function saveMapping() { bootstrap.Modal.getInstance(document.getElementById('addModal')).hide(); editingMapping = null; loadMappings(); + } else if (res.status === 409) { + handleMappingConflict(data); } else { alert('Eroare: ' + (data.error || 'Unknown')); } @@ -462,6 +502,8 @@ async function saveInlineMapping() { if (data.success) { cancelInlineAdd(); loadMappings(); + } else if (res.status === 409) { + handleMappingConflict(data); } else { alert('Eroare: ' + (data.error || 'Unknown')); } @@ -555,12 +597,17 @@ function showUndoToast(message, undoCallback) { const newBtn = undoBtn.cloneNode(true); undoBtn.parentNode.replaceChild(newBtn, undoBtn); newBtn.id = 'toastUndoBtn'; - newBtn.addEventListener('click', () => { - undoCallback(); - const toastEl = document.getElementById('undoToast'); - const inst = bootstrap.Toast.getInstance(toastEl); - if (inst) inst.hide(); - }); + if (undoCallback) { + newBtn.style.display = ''; + newBtn.addEventListener('click', () => { + undoCallback(); + const toastEl = document.getElementById('undoToast'); + const inst = bootstrap.Toast.getInstance(toastEl); + if (inst) inst.hide(); + }); + } else { + newBtn.style.display = 'none'; + } const toast = new 
bootstrap.Toast(document.getElementById('undoToast')); toast.show(); } @@ -639,6 +686,33 @@ async function importCsv() { function exportCsv() { window.location.href = '/api/mappings/export-csv'; } function downloadTemplate() { window.location.href = '/api/mappings/csv-template'; } +// ── Duplicate / Conflict handling ──────────────── + +function handleMappingConflict(data) { + const msg = data.error || 'Conflict la salvare'; + if (data.can_restore) { + const restore = confirm(`${msg}\n\nDoriti sa restaurati maparea stearsa?`); + if (restore) { + // Find sku/codmat from the inline row or modal + const sku = (document.getElementById('inlineSku') || document.getElementById('inputSku'))?.value?.trim(); + const codmat = (document.getElementById('inlineCodmat') || document.querySelector('.cl-codmat'))?.value?.trim(); + if (sku && codmat) { + fetch(`/api/mappings/${encodeURIComponent(sku)}/${encodeURIComponent(codmat)}/restore`, { method: 'POST' }) + .then(r => r.json()) + .then(d => { + if (d.success) { cancelInlineAdd(); loadMappings(); } + else alert('Eroare la restaurare: ' + (d.error || '')); + }); + } + } + } else { + showUndoToast(msg, null); + // Show non-dismissible inline error + const warn = document.getElementById('pctWarning'); + if (warn) { warn.textContent = msg; warn.style.display = ''; } + } +} + function esc(s) { if (s == null) return ''; return String(s).replace(/&/g, '&').replace(//g, '>').replace(/"/g, '"').replace(/'/g, '''); diff --git a/api/app/templates/dashboard.html b/api/app/templates/dashboard.html index 96b2932..097e1c9 100644 --- a/api/app/templates/dashboard.html +++ b/api/app/templates/dashboard.html @@ -5,102 +5,86 @@ {% block content %}

Panou de Comanda

- -
-
- Sync Control - idle -
-
-
-
- - -
-
-
- - -
- -
-
- -
-
-
-
- Sync pornit — vezi progresul live -
-
-
-
- - -
-
- Ultimul Sync - -
-
-
-
-
Data
-
-
Status
-
-
Importate
0
-
Omise
0
-
Erori
0
-
Durata
-
-
-
+ +
+ +
+ + Inactiv +
+ + +
+
+
+ +
+ + + + + ↗ jurnal +
+ +
-
-
- Comenzi -
- - - - -
-
-
- - +
+ Comenzi +
+
+
+ + + +
+ + + +
+ + + + + + + +
-
-
- - - - - + +
+ +
+ +
diff --git a/api/app/templates/mappings.html b/api/app/templates/mappings.html index bc1e301..e0536dc 100644 --- a/api/app/templates/mappings.html +++ b/api/app/templates/mappings.html @@ -3,6 +3,11 @@ {% block nav_mappings %}active{% endblock %} {% block content %} +

Mapari SKU

@@ -36,6 +41,13 @@
+ +
+ + + +
+
diff --git a/api/app/templates/missing_skus.html b/api/app/templates/missing_skus.html index 4ffb89c..e9fdf70 100644 --- a/api/app/templates/missing_skus.html +++ b/api/app/templates/missing_skus.html @@ -9,24 +9,29 @@ -
- -
- - - + + +
+ +
@@ -92,77 +97,134 @@ let currentMapSku = ''; let mapAcTimeout = null; let currentPage = 1; -let currentResolved = 0; +let skuStatusFilter = 'unresolved'; const perPage = 20; -document.addEventListener('DOMContentLoaded', () => { - loadMissing(1); +// ── Filter pills ────────────────────────────────── +document.querySelectorAll('.filter-pill[data-sku-status]').forEach(btn => { + btn.addEventListener('click', function() { + document.querySelectorAll('.filter-pill[data-sku-status]').forEach(b => b.classList.remove('active')); + this.classList.add('active'); + skuStatusFilter = this.dataset.skuStatus; + currentPage = 1; + loadMissingSkus(); + }); }); -function setResolvedFilter(val) { - currentResolved = val; - currentPage = 1; - // Update button styles - document.getElementById('btnUnresolved').className = 'btn btn-sm ' + (val === 0 ? 'btn-primary' : 'btn-outline-primary'); - document.getElementById('btnResolved').className = 'btn btn-sm ' + (val === 1 ? 'btn-success' : 'btn-outline-success'); - document.getElementById('btnAll').className = 'btn btn-sm ' + (val === -1 ? 
'btn-secondary' : 'btn-outline-secondary'); - loadMissing(1); +// ── Search with debounce ───────────────────────── +let skuSearchTimer = null; +document.getElementById('skuSearch')?.addEventListener('input', function() { + clearTimeout(skuSearchTimer); + skuSearchTimer = setTimeout(() => { currentPage = 1; loadMissingSkus(); }, 300); +}); + +// ── Rescan ──────────────────────────────────────── +document.getElementById('rescanBtn')?.addEventListener('click', async function() { + this.disabled = true; + const prog = document.getElementById('rescanProgress'); + const result = document.getElementById('rescanResult'); + const progText = document.getElementById('rescanProgressText'); + if (prog) { prog.style.display = 'flex'; } + if (result) result.style.display = 'none'; + try { + const data = await fetch('/api/validate/scan', { method: 'POST' }).then(r => r.json()); + if (progText) progText.textContent = 'Gata.'; + if (result) { + result.innerHTML = `✓ ${data.total_skus_scanned || 0} scanate  |  ${data.new_missing || 0} noi lipsa  |  ${data.auto_resolved || 0} rezolvate`; + result.style.display = 'block'; + } + loadMissingSkus(); + } catch(e) { + if (progText) progText.textContent = 'Eroare.'; + } finally { + this.disabled = false; + setTimeout(() => { if (prog) prog.style.display = 'none'; }, 2500); + } +}); + +document.addEventListener('DOMContentLoaded', () => { + loadMissingSkus(); +}); + +function resolvedParamFor(statusFilter) { + if (statusFilter === 'resolved') return 1; + if (statusFilter === 'all') return -1; + return 0; // unresolved (default) } -async function loadMissing(page) { - currentPage = page || 1; - try { - const res = await fetch(`/api/validate/missing-skus?page=${currentPage}&per_page=${perPage}&resolved=${currentResolved}`); - const data = await res.json(); - const tbody = document.getElementById('missingBody'); +function loadMissingSkus(page) { + currentPage = page || currentPage; + const params = new URLSearchParams(); + const resolvedVal = 
resolvedParamFor(skuStatusFilter); + params.set('resolved', resolvedVal); + params.set('page', currentPage); + params.set('per_page', perPage); + const search = document.getElementById('skuSearch')?.value?.trim(); + if (search) params.set('search', search); + fetch('/api/validate/missing-skus?' + params.toString()) + .then(r => r.json()) + .then(data => { + const c = data.counts || {}; + const el = id => document.getElementById(id); + if (el('cntUnres')) el('cntUnres').textContent = c.unresolved || 0; + if (el('cntRes')) el('cntRes').textContent = c.resolved || 0; + if (el('cntAllSkus')) el('cntAllSkus').textContent = c.total || 0; + renderMissingSkusTable(data.skus || data.missing_skus || [], data); + renderPagination(data); + }) + .catch(err => { + document.getElementById('missingBody').innerHTML = + `${err.message}`; + }); +} + +// Keep backward compat alias +function loadMissing(page) { loadMissingSkus(page); } + +function renderMissingSkusTable(skus, data) { + const tbody = document.getElementById('missingBody'); + if (data) { document.getElementById('missingInfo').textContent = `Total: ${data.total || 0} | Pagina: ${data.page || 1} din ${data.pages || 1}`; - - const skus = data.missing_skus || []; - if (skus.length === 0) { - const msg = currentResolved === 0 ? 'Toate SKU-urile sunt mapate!' : - currentResolved === 1 ? 'Niciun SKU rezolvat' : 'Niciun SKU gasit'; - tbody.innerHTML = `${msg}`; - renderPagination(data); - return; - } - - tbody.innerHTML = skus.map(s => { - const statusBadge = s.resolved - ? 'Rezolvat' - : 'Nerezolvat'; - - let firstCustomer = '-'; - try { - const customers = JSON.parse(s.customers || '[]'); - if (customers.length > 0) firstCustomer = customers[0]; - } catch (e) { /* ignore */ } - - const orderCount = s.order_count != null ? s.order_count : '-'; - - return ` - ${esc(s.sku)} - ${esc(s.product_name || '-')} - ${esc(orderCount)} - ${esc(firstCustomer)} - ${s.first_seen ? 
new Date(s.first_seen).toLocaleDateString('ro-RO') : '-'} - ${statusBadge} - - ${!s.resolved - ? ` - - ` - : `${s.resolved_at ? new Date(s.resolved_at).toLocaleDateString('ro-RO') : ''}`} - - `; - }).join(''); - - renderPagination(data); - } catch (err) { - document.getElementById('missingBody').innerHTML = - `${err.message}`; } + + if (!skus || skus.length === 0) { + const msg = skuStatusFilter === 'unresolved' ? 'Toate SKU-urile sunt mapate!' : + skuStatusFilter === 'resolved' ? 'Niciun SKU rezolvat' : 'Niciun SKU gasit'; + tbody.innerHTML = `${msg}`; + return; + } + + tbody.innerHTML = skus.map(s => { + const statusBadge = s.resolved + ? 'Rezolvat' + : 'Nerezolvat'; + + let firstCustomer = '-'; + try { + const customers = JSON.parse(s.customers || '[]'); + if (customers.length > 0) firstCustomer = customers[0]; + } catch (e) { /* ignore */ } + + const orderCount = s.order_count != null ? s.order_count : '-'; + + return ` + ${esc(s.sku)} + ${esc(s.product_name || '-')} + ${esc(orderCount)} + ${esc(firstCustomer)} + ${s.first_seen ? new Date(s.first_seen).toLocaleDateString('ro-RO') : '-'} + ${statusBadge} + + ${!s.resolved + ? ` + + ` + : `${s.resolved_at ? new Date(s.resolved_at).toLocaleDateString('ro-RO') : ''}`} + + `; + }).join(''); } function renderPagination(data) { @@ -173,20 +235,20 @@ function renderPagination(data) { let html = ''; html += `
  • - Anterior
  • `; + Anterior`; const range = 2; for (let i = 1; i <= total; i++) { if (i === 1 || i === total || (i >= page - range && i <= page + range)) { html += `
  • - ${i}
  • `; + ${i}`; } else if (i === page - range - 1 || i === page + range + 1) { html += `
  • `; } } html += `
  • - Urmator
  • `; + Urmator`; ul.innerHTML = html; } @@ -325,7 +387,7 @@ async function saveQuickMap() { const data = await res.json(); if (data.success) { bootstrap.Modal.getInstance(document.getElementById('mapModal')).hide(); - loadMissing(currentPage); + loadMissingSkus(currentPage); } else { alert('Eroare: ' + (data.error || 'Unknown')); } @@ -334,15 +396,6 @@ async function saveQuickMap() { } } -async function scanForMissing() { - try { - await fetch('/api/validate/scan', { method: 'POST' }); - loadMissing(1); - } catch (err) { - alert('Eroare scan: ' + err.message); - } -} - function exportMissingCsv() { window.location.href = '/api/validate/missing-skus-csv'; }