feat(sync): add SSE live feed, unified logs page, fix Oracle connection
- Add SSE event bus in sync_service (subscribe/unsubscribe/_emit)
- Add GET /api/sync/stream SSE endpoint for real-time sync progress
- Rewrite logs.html: unified runs table + live feed + summary + filters
- Rewrite logs.js: SSE EventSource client, run selection, pagination
- Dashboard: clickable runs navigate to /logs?run=, sync started banner
- Remove "Import Comenzi" nav item, delete sync_detail.html
- Add error_message column to sync_runs table with migration
- Fix: export TNS_ADMIN as OS env var so oracledb finds tnsnames.ora
- Fix: use get_oracle_connection() instead of direct pool.acquire()
- Fix: CRM_POLITICI_PRET_ART INSERT to match actual table schema

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -91,6 +91,8 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None) -> dic
|
||||
order_number = clean_web_text(order.number)
|
||||
order_date = convert_web_date(order.date)
|
||||
|
||||
if database.pool is None:
|
||||
raise RuntimeError("Oracle pool not initialized")
|
||||
with database.pool.acquire() as conn:
|
||||
with conn.cursor() as cur:
|
||||
# Step 1: Process partner
|
||||
|
||||
@@ -20,7 +20,8 @@ async def create_sync_run(run_id: str, json_files: int = 0):
|
||||
|
||||
|
||||
async def update_sync_run(run_id: str, status: str, total_orders: int = 0,
|
||||
imported: int = 0, skipped: int = 0, errors: int = 0):
|
||||
imported: int = 0, skipped: int = 0, errors: int = 0,
|
||||
error_message: str = None):
|
||||
"""Update sync run with results."""
|
||||
db = await get_sqlite()
|
||||
try:
|
||||
@@ -31,9 +32,10 @@ async def update_sync_run(run_id: str, status: str, total_orders: int = 0,
|
||||
total_orders = ?,
|
||||
imported = ?,
|
||||
skipped = ?,
|
||||
errors = ?
|
||||
errors = ?,
|
||||
error_message = ?
|
||||
WHERE run_id = ?
|
||||
""", (status, total_orders, imported, skipped, errors, run_id))
|
||||
""", (status, total_orders, imported, skipped, errors, error_message, run_id))
|
||||
await db.commit()
|
||||
finally:
|
||||
await db.close()
|
||||
|
||||
@@ -13,6 +13,33 @@ logger = logging.getLogger(__name__)
|
||||
_sync_lock = asyncio.Lock()
|
||||
_current_sync = None # dict with run_id, status, progress info
|
||||
|
||||
# SSE subscriber system: each connected client gets its own event queue.
_subscribers: list[asyncio.Queue] = []

# Bound each subscriber queue so a stalled SSE client cannot grow memory
# without limit; _emit drops events for full queues instead of blocking
# the sync pipeline.
_SUBSCRIBER_QUEUE_MAXSIZE = 100


def subscribe() -> asyncio.Queue:
    """Subscribe to sync events. Returns a queue that will receive event dicts.

    The queue is bounded (see _SUBSCRIBER_QUEUE_MAXSIZE): if the consumer
    falls behind, newer events are dropped by _emit rather than queued forever.
    """
    q: asyncio.Queue = asyncio.Queue(maxsize=_SUBSCRIBER_QUEUE_MAXSIZE)
    _subscribers.append(q)
    return q


def unsubscribe(q: asyncio.Queue) -> None:
    """Unsubscribe from sync events; safe to call with an unknown queue."""
    try:
        _subscribers.remove(q)
    except ValueError:
        # Already removed (e.g. double disconnect) — nothing to do.
        pass


async def _emit(event: dict) -> None:
    """Push an event dict to all subscriber queues.

    Events for full queues (slow consumers) are silently dropped so a single
    stalled SSE connection cannot stall or bloat the sync run.
    """
    for q in _subscribers:
        try:
            q.put_nowait(event)
        except asyncio.QueueFull:
            pass
||||
|
||||
|
||||
async def get_sync_status():
|
||||
"""Get current sync status."""
|
||||
@@ -21,7 +48,25 @@ async def get_sync_status():
|
||||
return {"status": "idle"}
|
||||
|
||||
|
||||
async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
async def prepare_sync(id_pol: int = None, id_sectie: int = None) -> dict:
    """Prepare a sync run - creates run_id and sets initial state.

    The id_pol / id_sectie parameters mirror run_sync()'s signature but are
    not consumed here; the actual sync applies them later.

    Returns {"run_id": ..., "status": "starting"} or {"error": ...} if already running.
    """
    global _current_sync

    # A held _sync_lock means run_sync() is mid-flight — refuse and report
    # the active run's id (if we still have its state).
    if _sync_lock.locked():
        active_run = _current_sync.get("run_id") if _current_sync else None
        return {"error": "Sync already running", "run_id": active_run}

    new_run_id = "{}_{}".format(
        datetime.now().strftime("%Y%m%d_%H%M%S"),
        uuid.uuid4().hex[:6],
    )
    _current_sync = {
        "run_id": new_run_id,
        "status": "running",
        "started_at": datetime.now().isoformat(),
        "progress": "Starting...",
    }
    return {"run_id": new_run_id, "status": "starting"}
||||
|
||||
|
||||
async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None) -> dict:
|
||||
"""Run a full sync cycle. Returns summary dict."""
|
||||
global _current_sync
|
||||
|
||||
@@ -29,30 +74,33 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
return {"error": "Sync already running"}
|
||||
|
||||
async with _sync_lock:
|
||||
run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
_current_sync = {
|
||||
"run_id": run_id,
|
||||
"status": "running",
|
||||
"started_at": datetime.now().isoformat(),
|
||||
"progress": "Reading JSON files..."
|
||||
}
|
||||
# Use provided run_id or generate one
|
||||
if not run_id:
|
||||
run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
_current_sync = {
|
||||
"run_id": run_id,
|
||||
"status": "running",
|
||||
"started_at": datetime.now().isoformat(),
|
||||
"progress": "Reading JSON files..."
|
||||
}
|
||||
|
||||
_current_sync["progress"] = "Reading JSON files..."
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": "Reading JSON files..."})
|
||||
|
||||
try:
|
||||
# Step 1: Read orders
|
||||
orders, json_count = order_reader.read_json_orders()
|
||||
await sqlite_service.create_sync_run(run_id, json_count)
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": f"Found {len(orders)} orders in {json_count} files"})
|
||||
|
||||
if not orders:
|
||||
await sqlite_service.update_sync_run(run_id, "completed", 0, 0, 0, 0)
|
||||
_current_sync = None
|
||||
return {
|
||||
"run_id": run_id,
|
||||
"status": "completed",
|
||||
"message": "No orders found",
|
||||
"json_files": json_count
|
||||
}
|
||||
summary = {"run_id": run_id, "status": "completed", "message": "No orders found", "json_files": json_count}
|
||||
await _emit({"type": "completed", "run_id": run_id, "summary": summary})
|
||||
return summary
|
||||
|
||||
_current_sync["progress"] = f"Validating {len(orders)} orders..."
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": f"Validating {len(orders)} orders..."})
|
||||
|
||||
# Step 2a: Find new orders (not yet in Oracle)
|
||||
all_order_numbers = [o.number for o in orders]
|
||||
@@ -65,6 +113,8 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
validation = await asyncio.to_thread(validation_service.validate_skus, all_skus)
|
||||
importable, skipped = validation_service.classify_orders(orders, validation)
|
||||
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": f"{len(importable)} importable, {len(skipped)} skipped (missing SKUs)"})
|
||||
|
||||
# Step 2c: Build SKU context from skipped orders
|
||||
sku_context = {} # {sku: {"orders": [], "customers": []}}
|
||||
for order, missing_skus_list in skipped:
|
||||
@@ -100,6 +150,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
id_pol = id_pol or settings.ID_POL
|
||||
if id_pol and importable:
|
||||
_current_sync["progress"] = "Validating prices..."
|
||||
await _emit({"type": "phase", "run_id": run_id, "message": "Validating prices..."})
|
||||
# Gather all CODMATs from importable orders
|
||||
all_codmats = set()
|
||||
for order in importable:
|
||||
@@ -124,7 +175,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
price_result["missing_price"], id_pol
|
||||
)
|
||||
|
||||
# Step 3: Record skipped orders
|
||||
# Step 3: Record skipped orders + emit events
|
||||
for order, missing_skus in skipped:
|
||||
customer = order.billing.company_name or \
|
||||
f"{order.billing.firstname} {order.billing.lastname}"
|
||||
@@ -137,13 +188,20 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
missing_skus=missing_skus,
|
||||
items_count=len(order.items)
|
||||
)
|
||||
await _emit({
|
||||
"type": "order_result", "run_id": run_id,
|
||||
"order_number": order.number, "customer_name": customer,
|
||||
"status": "SKIPPED", "missing_skus": missing_skus,
|
||||
"items_count": len(order.items), "progress": f"0/{len(importable)}"
|
||||
})
|
||||
|
||||
# Step 4: Import valid orders
|
||||
imported_count = 0
|
||||
error_count = 0
|
||||
|
||||
for i, order in enumerate(importable):
|
||||
_current_sync["progress"] = f"Importing {i+1}/{len(importable)}: #{order.number}"
|
||||
progress_str = f"{i+1}/{len(importable)}"
|
||||
_current_sync["progress"] = f"Importing {progress_str}: #{order.number}"
|
||||
|
||||
result = await asyncio.to_thread(
|
||||
import_service.import_single_order,
|
||||
@@ -164,6 +222,12 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
id_partener=result["id_partener"],
|
||||
items_count=len(order.items)
|
||||
)
|
||||
await _emit({
|
||||
"type": "order_result", "run_id": run_id,
|
||||
"order_number": order.number, "customer_name": customer,
|
||||
"status": "IMPORTED", "items_count": len(order.items),
|
||||
"id_comanda": result["id_comanda"], "progress": progress_str
|
||||
})
|
||||
else:
|
||||
error_count += 1
|
||||
await sqlite_service.add_import_order(
|
||||
@@ -176,6 +240,12 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
error_message=result["error"],
|
||||
items_count=len(order.items)
|
||||
)
|
||||
await _emit({
|
||||
"type": "order_result", "run_id": run_id,
|
||||
"order_number": order.number, "customer_name": customer,
|
||||
"status": "ERROR", "error_message": result["error"],
|
||||
"items_count": len(order.items), "progress": progress_str
|
||||
})
|
||||
|
||||
# Safety: stop if too many errors
|
||||
if error_count > 10:
|
||||
@@ -204,14 +274,22 @@ async def run_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
f"Sync {run_id} completed: {imported_count} imported, "
|
||||
f"{len(skipped)} skipped, {error_count} errors"
|
||||
)
|
||||
await _emit({"type": "completed", "run_id": run_id, "summary": summary})
|
||||
return summary
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Sync {run_id} failed: {e}")
|
||||
await sqlite_service.update_sync_run(run_id, "failed", 0, 0, 0, 1)
|
||||
await sqlite_service.update_sync_run(run_id, "failed", 0, 0, 0, 1, error_message=str(e))
|
||||
_current_sync["error"] = str(e)
|
||||
await _emit({"type": "failed", "run_id": run_id, "error": str(e)})
|
||||
return {"run_id": run_id, "status": "failed", "error": str(e)}
|
||||
finally:
|
||||
_current_sync = None
|
||||
# Keep _current_sync for 10 seconds so status endpoint can show final result
|
||||
async def _clear_current_sync():
|
||||
await asyncio.sleep(10)
|
||||
global _current_sync
|
||||
_current_sync = None
|
||||
asyncio.ensure_future(_clear_current_sync())
|
||||
|
||||
|
||||
def stop_sync():
|
||||
|
||||
@@ -17,7 +17,8 @@ def validate_skus(skus: set[str]) -> dict:
|
||||
direct = set()
|
||||
sku_list = list(skus)
|
||||
|
||||
with database.pool.acquire() as conn:
|
||||
conn = database.get_oracle_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
# Check in batches of 500
|
||||
for i in range(0, len(sku_list), 500):
|
||||
@@ -44,6 +45,8 @@ def validate_skus(skus: set[str]) -> dict:
|
||||
""", params2)
|
||||
for row in cur:
|
||||
direct.add(row[0])
|
||||
finally:
|
||||
database.pool.release(conn)
|
||||
|
||||
missing = skus - mapped - direct
|
||||
|
||||
@@ -80,7 +83,8 @@ def find_new_orders(order_numbers: list[str]) -> set[str]:
|
||||
existing = set()
|
||||
num_list = list(order_numbers)
|
||||
|
||||
with database.pool.acquire() as conn:
|
||||
conn = database.get_oracle_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
for i in range(0, len(num_list), 500):
|
||||
batch = num_list[i:i+500]
|
||||
@@ -93,6 +97,8 @@ def find_new_orders(order_numbers: list[str]) -> set[str]:
|
||||
""", params)
|
||||
for row in cur:
|
||||
existing.add(row[0])
|
||||
finally:
|
||||
database.pool.release(conn)
|
||||
|
||||
new_orders = set(order_numbers) - existing
|
||||
logger.info(f"Order check: {len(new_orders)} new, {len(existing)} already exist out of {len(order_numbers)} total")
|
||||
@@ -109,7 +115,8 @@ def validate_prices(codmats: set[str], id_pol: int) -> dict:
|
||||
ids_with_price = set()
|
||||
codmat_list = list(codmats)
|
||||
|
||||
with database.pool.acquire() as conn:
|
||||
conn = database.get_oracle_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
# Step 1: Get ID_ARTICOL for each CODMAT
|
||||
for i in range(0, len(codmat_list), 500):
|
||||
@@ -138,6 +145,8 @@ def validate_prices(codmats: set[str], id_pol: int) -> dict:
|
||||
""", params)
|
||||
for row in cur:
|
||||
ids_with_price.add(row[0])
|
||||
finally:
|
||||
database.pool.release(conn)
|
||||
|
||||
# Map back to CODMATs
|
||||
has_price = {cm for cm, aid in codmat_to_id.items() if aid in ids_with_price}
|
||||
@@ -151,7 +160,8 @@ def ensure_prices(codmats: set[str], id_pol: int):
|
||||
if not codmats:
|
||||
return
|
||||
|
||||
with database.pool.acquire() as conn:
|
||||
conn = database.get_oracle_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
# Get ID_VALUTA for this policy
|
||||
cur.execute("""
|
||||
@@ -176,16 +186,18 @@ def ensure_prices(codmats: set[str], id_pol: int):
|
||||
|
||||
cur.execute("""
|
||||
INSERT INTO CRM_POLITICI_PRET_ART
|
||||
(ID_POL_ART, ID_POL, ID_ARTICOL, PRET, ID_COMANDA, ID_VALUTA,
|
||||
ID_UTIL, DATAORA, PROC_TVAV, ID_PARTR, ID_PARTZ,
|
||||
PRETFTVA, PRETCTVA, CANTITATE, ID_UM, PRET_MIN, PRET_MIN_TVA)
|
||||
(ID_POL_ART, ID_POL, ID_ARTICOL, PRET, ID_VALUTA,
|
||||
ID_UTIL, DATAORA, PROC_TVAV,
|
||||
PRETFTVA, PRETCTVA)
|
||||
VALUES
|
||||
(SEQ_CRM_POLITICI_PRET_ART.NEXTVAL, :id_pol, :id_articol, 0, NULL, :id_valuta,
|
||||
-3, SYSDATE, 1.19, NULL, NULL,
|
||||
0, 0, 0, NULL, 0, 0)
|
||||
(SEQ_CRM_POLITICI_PRET_ART.NEXTVAL, :id_pol, :id_articol, 0, :id_valuta,
|
||||
-3, SYSDATE, 1.19,
|
||||
0, 0)
|
||||
""", {"id_pol": id_pol, "id_articol": id_articol, "id_valuta": id_valuta})
|
||||
logger.info(f"Pret 0 adaugat pentru CODMAT {codmat} in politica {id_pol}")
|
||||
|
||||
conn.commit()
|
||||
finally:
|
||||
database.pool.release(conn)
|
||||
|
||||
logger.info(f"Ensure prices done: {len(codmats)} CODMATs processed for policy {id_pol}")
|
||||
|
||||
Reference in New Issue
Block a user