feat(sync): add SSE live feed, unified logs page, fix Oracle connection
- Add SSE event bus in sync_service (subscribe/unsubscribe/_emit) - Add GET /api/sync/stream SSE endpoint for real-time sync progress - Rewrite logs.html: unified runs table + live feed + summary + filters - Rewrite logs.js: SSE EventSource client, run selection, pagination - Dashboard: clickable runs navigate to /logs?run=, sync started banner - Remove "Import Comenzi" nav item, delete sync_detail.html - Add error_message column to sync_runs table with migration - Fix: export TNS_ADMIN as OS env var so oracledb finds tnsnames.ora - Fix: use get_oracle_connection() instead of direct pool.acquire() - Fix: CRM_POLITICI_PRET_ART INSERT to match actual table schema Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -13,6 +13,33 @@ logger = logging.getLogger(__name__)
|
||||
# Serializes sync runs: only one sync may execute at a time.
_sync_lock = asyncio.Lock()

# State of the in-flight sync; None when idle.
_current_sync = None  # dict with run_id, status, progress info

# SSE subscriber system
# Each connected SSE client owns one queue; _emit fans events out to all of them.
_subscribers: list[asyncio.Queue] = []
||||
def subscribe() -> asyncio.Queue:
    """Subscribe to sync events.

    Returns:
        A fresh ``asyncio.Queue`` that will receive event dicts published
        via ``_emit``. Callers must pair this with ``unsubscribe`` when the
        client disconnects, otherwise the queue is retained forever.
    """
    # Bound the queue so a stalled SSE client cannot grow memory without
    # limit. _emit already tolerates QueueFull by dropping the event, but
    # with the previous unbounded Queue() that branch could never fire.
    q = asyncio.Queue(maxsize=256)
    _subscribers.append(q)
    return q
def unsubscribe(q: asyncio.Queue):
    """Remove *q* from the subscriber list.

    A no-op when *q* was never subscribed (or already removed), mirroring
    the idempotent contract SSE disconnect handlers rely on.
    """
    # Membership check and remove are not separated by an await, so this is
    # race-free under asyncio's single-threaded execution model.
    if q in _subscribers:
        _subscribers.remove(q)
async def _emit(event: dict):
    """Fan *event* out to every subscriber queue without blocking."""
    # Snapshot the list so an unsubscribe happening elsewhere cannot disturb
    # the iteration; put_nowait never yields, so delivery stays atomic.
    for queue in list(_subscribers):
        try:
            queue.put_nowait(event)
        except asyncio.QueueFull:
            # Slow consumer: drop this event for that client rather than block.
            pass
async def get_sync_status():
|
||||
"""Get current sync status."""
|
||||
@@ -21,7 +48,25 @@ async def get_sync_status():
|
||||
return {"status": "idle"}
|
||||
|
||||
|
||||
async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
async def prepare_sync(id_pol: int | None = None, id_sectie: int | None = None) -> dict:
    """Prepare a sync run - creates run_id and sets initial state.

    Args:
        id_pol: Accepted for signature parity with ``run_sync``; unused here.
        id_sectie: Accepted for signature parity with ``run_sync``; unused here.

    Returns:
        {"run_id": ..., "status": "starting"} or {"error": ...} if already running.
    """
    global _current_sync
    # NOTE(review): this is only a locked() check, not an acquisition —
    # run_sync takes the lock later. Two near-simultaneous prepare_sync
    # calls could both pass this check; likely acceptable for a single-UI
    # tool, but confirm.
    if _sync_lock.locked():
        return {
            "error": "Sync already running",
            "run_id": _current_sync.get("run_id") if _current_sync else None,
        }

    # Timestamp plus a short random suffix keeps run_ids unique even when
    # two runs start within the same second.
    run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
    _current_sync = {
        "run_id": run_id,
        "status": "running",
        "started_at": datetime.now().isoformat(),
        "progress": "Starting..."
    }
    return {"run_id": run_id, "status": "starting"}
|
||||
async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None) -> dict:
|
||||
"""Run a full sync cycle. Returns summary dict."""
|
||||
global _current_sync
|
||||
|
||||
@@ -29,30 +74,33 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
return {"error": "Sync already running"}
|
||||
|
||||
async with _sync_lock:
|
||||
run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
_current_sync = {
|
||||
"run_id": run_id,
|
||||
"status": "running",
|
||||
"started_at": datetime.now().isoformat(),
|
||||
"progress": "Reading JSON files..."
|
||||
}
|
||||
# Use provided run_id or generate one
|
||||
if not run_id:
|
||||
run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
_current_sync = {
|
||||
"run_id": run_id,
|
||||
"status": "running",
|
||||
"started_at": datetime.now().isoformat(),
|
||||
"progress": "Reading JSON files..."
|
||||
}
|
||||
|
||||
_current_sync["progress"] = "Reading JSON files..."
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": "Reading JSON files..."})
|
||||
|
||||
try:
|
||||
# Step 1: Read orders
|
||||
orders, json_count = order_reader.read_json_orders()
|
||||
await sqlite_service.create_sync_run(run_id, json_count)
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": f"Found {len(orders)} orders in {json_count} files"})
|
||||
|
||||
if not orders:
|
||||
await sqlite_service.update_sync_run(run_id, "completed", 0, 0, 0, 0)
|
||||
_current_sync = None
|
||||
return {
|
||||
"run_id": run_id,
|
||||
"status": "completed",
|
||||
"message": "No orders found",
|
||||
"json_files": json_count
|
||||
}
|
||||
summary = {"run_id": run_id, "status": "completed", "message": "No orders found", "json_files": json_count}
|
||||
await _emit({"type": "completed", "run_id": run_id, "summary": summary})
|
||||
return summary
|
||||
|
||||
_current_sync["progress"] = f"Validating {len(orders)} orders..."
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": f"Validating {len(orders)} orders..."})
|
||||
|
||||
# Step 2a: Find new orders (not yet in Oracle)
|
||||
all_order_numbers = [o.number for o in orders]
|
||||
@@ -65,6 +113,8 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
validation = await asyncio.to_thread(validation_service.validate_skus, all_skus)
|
||||
importable, skipped = validation_service.classify_orders(orders, validation)
|
||||
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": f"{len(importable)} importable, {len(skipped)} skipped (missing SKUs)"})
|
||||
|
||||
# Step 2c: Build SKU context from skipped orders
|
||||
sku_context = {} # {sku: {"orders": [], "customers": []}}
|
||||
for order, missing_skus_list in skipped:
|
||||
@@ -100,6 +150,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
id_pol = id_pol or settings.ID_POL
|
||||
if id_pol and importable:
|
||||
_current_sync["progress"] = "Validating prices..."
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": "Validating prices..."})
|
||||
# Gather all CODMATs from importable orders
|
||||
all_codmats = set()
|
||||
for order in importable:
|
||||
@@ -124,7 +175,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
price_result["missing_price"], id_pol
|
||||
)
|
||||
|
||||
# Step 3: Record skipped orders
|
||||
# Step 3: Record skipped orders + emit events
|
||||
for order, missing_skus in skipped:
|
||||
customer = order.billing.company_name or \
|
||||
f"{order.billing.firstname} {order.billing.lastname}"
|
||||
@@ -137,13 +188,20 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
missing_skus=missing_skus,
|
||||
items_count=len(order.items)
|
||||
)
|
||||
await _emit({
|
||||
"type": "order_result", "run_id": run_id,
|
||||
"order_number": order.number, "customer_name": customer,
|
||||
"status": "SKIPPED", "missing_skus": missing_skus,
|
||||
"items_count": len(order.items), "progress": f"0/{len(importable)}"
|
||||
})
|
||||
|
||||
# Step 4: Import valid orders
|
||||
imported_count = 0
|
||||
error_count = 0
|
||||
|
||||
for i, order in enumerate(importable):
|
||||
_current_sync["progress"] = f"Importing {i+1}/{len(importable)}: #{order.number}"
|
||||
progress_str = f"{i+1}/{len(importable)}"
|
||||
_current_sync["progress"] = f"Importing {progress_str}: #{order.number}"
|
||||
|
||||
result = await asyncio.to_thread(
|
||||
import_service.import_single_order,
|
||||
@@ -164,6 +222,12 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
id_partener=result["id_partener"],
|
||||
items_count=len(order.items)
|
||||
)
|
||||
await _emit({
|
||||
"type": "order_result", "run_id": run_id,
|
||||
"order_number": order.number, "customer_name": customer,
|
||||
"status": "IMPORTED", "items_count": len(order.items),
|
||||
"id_comanda": result["id_comanda"], "progress": progress_str
|
||||
})
|
||||
else:
|
||||
error_count += 1
|
||||
await sqlite_service.add_import_order(
|
||||
@@ -176,6 +240,12 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
error_message=result["error"],
|
||||
items_count=len(order.items)
|
||||
)
|
||||
await _emit({
|
||||
"type": "order_result", "run_id": run_id,
|
||||
"order_number": order.number, "customer_name": customer,
|
||||
"status": "ERROR", "error_message": result["error"],
|
||||
"items_count": len(order.items), "progress": progress_str
|
||||
})
|
||||
|
||||
# Safety: stop if too many errors
|
||||
if error_count > 10:
|
||||
@@ -204,14 +274,22 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
f"Sync {run_id} completed: {imported_count} imported, "
|
||||
f"{len(skipped)} skipped, {error_count} errors"
|
||||
)
|
||||
await _emit({"type": "completed", "run_id": run_id, "summary": summary})
|
||||
return summary
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Sync {run_id} failed: {e}")
|
||||
await sqlite_service.update_sync_run(run_id, "failed", 0, 0, 0, 1)
|
||||
await sqlite_service.update_sync_run(run_id, "failed", 0, 0, 0, 1, error_message=str(e))
|
||||
_current_sync["error"] = str(e)
|
||||
await _emit({"type": "failed", "run_id": run_id, "error": str(e)})
|
||||
return {"run_id": run_id, "status": "failed", "error": str(e)}
|
||||
finally:
|
||||
_current_sync = None
|
||||
# Keep _current_sync for 10 seconds so status endpoint can show final result
|
||||
async def _clear_current_sync():
|
||||
await asyncio.sleep(10)
|
||||
global _current_sync
|
||||
_current_sync = None
|
||||
asyncio.ensure_future(_clear_current_sync())
|
||||
|
||||
|
||||
def stop_sync():
|
||||
|
||||
Reference in New Issue
Block a user