- deploy.ps1, iis-web.config: Windows Server deployment scripts - api/app/routers/sync.py, dashboard.py: router updates - api/app/services/import_service.py, sync_service.py: service updates - api/app/static/css/style.css, js/*.js: UI updates - api/database-scripts/08_PACK_FACTURARE.pck: Oracle package - .gitignore: add .gittoken - CLAUDE.md, agent configs: documentation updates Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
485 lines
19 KiB
Python
485 lines
19 KiB
Python
import asyncio
|
|
import json
|
|
from datetime import datetime
|
|
|
|
from fastapi import APIRouter, Request, BackgroundTasks
|
|
from fastapi.templating import Jinja2Templates
|
|
from fastapi.responses import HTMLResponse
|
|
from pydantic import BaseModel
|
|
from pathlib import Path
|
|
from typing import Optional
|
|
|
|
from ..services import sync_service, scheduler_service, sqlite_service, invoice_service
|
|
|
|
# Router for the sync + dashboard endpoints; no prefix, so the paths
# registered below are absolute ("/api/sync/...", "/logs", ...).
router = APIRouter(tags=["sync"])
# Jinja2 templates are resolved relative to the package: ../templates
templates = Jinja2Templates(directory=str(Path(__file__).parent.parent / "templates"))
|
|
|
|
|
|
class ScheduleConfig(BaseModel):
    """Request body for PUT /api/sync/schedule (enable/disable the scheduler)."""
    # True to start the background scheduler, False to stop it.
    enabled: bool
    # Minutes between automatic sync runs; used only when enabled.
    interval_minutes: int = 5
|
|
|
|
|
|
class AppSettingsUpdate(BaseModel):
    """Request body for PUT /api/settings.

    All values are stored as strings in SQLite app settings; empty string
    means "not configured". VAT rates default to "21".
    """
    # Oracle article code (codmat) used for the transport/shipping line.
    transport_codmat: str = ""
    # VAT percentage applied to transport lines.
    transport_vat: str = "21"
    # Oracle article code (codmat) used for discount lines.
    discount_codmat: str = ""
    # Price policy id for transport lines.
    transport_id_pol: str = ""
    # VAT percentage applied to discount lines.
    discount_vat: str = "21"
    # Price policy id for discount lines.
    discount_id_pol: str = ""
    # Default price policy id for regular order lines.
    id_pol: str = ""
    # Oracle section id used when creating orders.
    id_sectie: str = ""
|
|
|
|
|
|
# API endpoints
|
|
@router.post("/api/sync/start")
async def start_sync(background_tasks: BackgroundTasks):
    """Trigger a sync run in the background.

    Prepares a run via the sync service; if preparation fails the error is
    returned immediately, otherwise the actual sync is scheduled as a
    FastAPI background task and the new run id is returned to the caller.
    """
    prep = await sync_service.prepare_sync()
    error = prep.get("error")
    if error:
        # Preparation failed (e.g. a run is already active) — report, don't start.
        return {"error": error, "run_id": prep.get("run_id")}

    new_run_id = prep["run_id"]
    background_tasks.add_task(sync_service.run_sync, run_id=new_run_id)
    return {"message": "Sync started", "run_id": new_run_id}
|
|
|
|
|
|
@router.post("/api/sync/stop")
async def stop_sync():
    """Ask the sync service to stop the currently running sync (best effort)."""
    # The service only sets a stop flag; the run finishes its current step.
    sync_service.stop_sync()
    return {"message": "Stop signal sent"}
|
|
|
|
|
|
@router.get("/api/sync/status")
async def sync_status():
    """Get current sync status with progress details and last_run info.

    Combines the in-memory live status from sync_service with a summary of
    the most recent finished run read from the SQLite ``sync_runs`` table.
    The DB lookup is best-effort: any failure leaves ``last_run`` as None.
    """
    status = await sync_service.get_sync_status()

    # Build last_run from most recent completed/failed sync_runs row
    current_run_id = status.get("run_id")
    last_run = None
    try:
        # Imported lazily to avoid a module-level import cycle with ..database.
        from ..database import get_sqlite
        db = await get_sqlite()
        try:
            # Exclude the currently active run (if any) so "last run" always
            # refers to a previously finished run, not the one in progress.
            if current_run_id:
                cursor = await db.execute("""
                    SELECT * FROM sync_runs
                    WHERE status IN ('completed', 'failed') AND run_id != ?
                    ORDER BY started_at DESC LIMIT 1
                """, (current_run_id,))
            else:
                cursor = await db.execute("""
                    SELECT * FROM sync_runs
                    WHERE status IN ('completed', 'failed')
                    ORDER BY started_at DESC LIMIT 1
                """)
            row = await cursor.fetchone()
            if row:
                row_dict = dict(row)
                duration_seconds = None
                # Duration is derived, not stored; tolerate malformed timestamps.
                if row_dict.get("started_at") and row_dict.get("finished_at"):
                    try:
                        dt_start = datetime.fromisoformat(row_dict["started_at"])
                        dt_end = datetime.fromisoformat(row_dict["finished_at"])
                        duration_seconds = int((dt_end - dt_start).total_seconds())
                    except (ValueError, TypeError):
                        pass
                last_run = {
                    "run_id": row_dict.get("run_id"),
                    "started_at": row_dict.get("started_at"),
                    "finished_at": row_dict.get("finished_at"),
                    "duration_seconds": duration_seconds,
                    "status": row_dict.get("status"),
                    "imported": row_dict.get("imported", 0),
                    "skipped": row_dict.get("skipped", 0),
                    "errors": row_dict.get("errors", 0),
                    "already_imported": row_dict.get("already_imported", 0),
                    "new_imported": row_dict.get("new_imported", 0),
                }
        finally:
            await db.close()
    except Exception:
        # Best-effort: status must still be served if SQLite is unavailable.
        pass

    # Ensure all expected keys are present so the UI never sees missing fields.
    result = {
        "status": status.get("status", "idle"),
        "run_id": status.get("run_id"),
        "started_at": status.get("started_at"),
        "finished_at": status.get("finished_at"),
        "phase": status.get("phase"),
        "phase_text": status.get("phase_text"),
        "progress_current": status.get("progress_current", 0),
        "progress_total": status.get("progress_total", 0),
        "counts": status.get("counts", {"imported": 0, "skipped": 0, "errors": 0}),
        "last_run": last_run,
    }
    return result
|
|
|
|
|
|
@router.get("/api/sync/history")
async def sync_history(page: int = 1, per_page: int = 20):
    """Return the paginated history of sync runs from SQLite."""
    history = await sqlite_service.get_sync_runs(page, per_page)
    return history
|
|
|
|
|
|
@router.get("/logs", response_class=HTMLResponse)
async def logs_page(request: Request, run: Optional[str] = None):
    """Render the logs page, optionally pre-selecting a sync run.

    Fix: the query parameter was annotated ``run: str = None`` — an implicit
    Optional, which is invalid typing and rejected by strict type checkers.
    ``Optional`` is already imported at the top of this module.
    """
    return templates.TemplateResponse("logs.html", {"request": request, "selected_run": run or ""})
|
|
|
|
|
|
@router.get("/api/sync/run/{run_id}")
async def sync_run_detail(run_id: str):
    """Return details for a single sync run, or an error payload when missing."""
    detail = await sqlite_service.get_sync_run_detail(run_id)
    if detail:
        return detail
    return {"error": "Run not found"}
|
|
|
|
|
|
@router.get("/api/sync/run/{run_id}/log")
async def sync_run_log(run_id: str):
    """Return the per-order log of a sync run, projected to the UI's fields."""
    detail = await sqlite_service.get_sync_run_detail(run_id)
    if not detail:
        return {"error": "Run not found", "status_code": 404}

    # Only these columns are consumed by the log view; project each stored
    # order row down to them (missing keys become None via dict.get).
    fields = (
        "order_number", "order_date", "customer_name", "items_count",
        "status", "id_comanda", "id_partener", "error_message",
        "missing_skus", "order_total", "factura_numar", "factura_serie",
    )
    return {
        "run_id": run_id,
        "run": detail.get("run", {}),
        "orders": [{name: row.get(name) for name in fields} for row in detail.get("orders", [])],
    }
|
|
|
|
|
|
def _format_text_log_from_detail(detail: dict) -> str:
|
|
"""Build a text log from SQLite stored data for completed runs."""
|
|
run = detail.get("run", {})
|
|
orders = detail.get("orders", [])
|
|
|
|
run_id = run.get("run_id", "?")
|
|
started = run.get("started_at", "")
|
|
|
|
lines = [f"=== Sync Run {run_id} ==="]
|
|
if started:
|
|
try:
|
|
dt = datetime.fromisoformat(started)
|
|
lines.append(f"Inceput: {dt.strftime('%d.%m.%Y %H:%M:%S')}")
|
|
except (ValueError, TypeError):
|
|
lines.append(f"Inceput: {started}")
|
|
lines.append("")
|
|
|
|
for o in orders:
|
|
status = (o.get("status") or "").upper()
|
|
number = o.get("order_number", "?")
|
|
customer = o.get("customer_name", "?")
|
|
order_date = o.get("order_date") or "?"
|
|
|
|
if status == "IMPORTED":
|
|
id_cmd = o.get("id_comanda", "?")
|
|
lines.append(f"#{number} [{order_date}] {customer} → IMPORTAT (ID: {id_cmd})")
|
|
elif status == "ALREADY_IMPORTED":
|
|
id_cmd = o.get("id_comanda", "?")
|
|
lines.append(f"#{number} [{order_date}] {customer} → DEJA IMPORTAT (ID: {id_cmd})")
|
|
elif status == "SKIPPED":
|
|
missing = o.get("missing_skus", "")
|
|
if isinstance(missing, str):
|
|
try:
|
|
missing = json.loads(missing)
|
|
except (json.JSONDecodeError, TypeError):
|
|
missing = [missing] if missing else []
|
|
skus_str = ", ".join(missing) if isinstance(missing, list) else str(missing)
|
|
lines.append(f"#{number} [{order_date}] {customer} → OMIS (lipsa: {skus_str})")
|
|
elif status == "ERROR":
|
|
err = o.get("error_message", "necunoscuta")
|
|
lines.append(f"#{number} [{order_date}] {customer} → EROARE: {err}")
|
|
|
|
# Summary line
|
|
lines.append("")
|
|
total = run.get("total_orders", 0)
|
|
imported = run.get("imported", 0)
|
|
skipped = run.get("skipped", 0)
|
|
errors = run.get("errors", 0)
|
|
|
|
duration_str = ""
|
|
finished = run.get("finished_at", "")
|
|
if started and finished:
|
|
try:
|
|
dt_start = datetime.fromisoformat(started)
|
|
dt_end = datetime.fromisoformat(finished)
|
|
secs = int((dt_end - dt_start).total_seconds())
|
|
duration_str = f" | Durata: {secs}s"
|
|
except (ValueError, TypeError):
|
|
pass
|
|
|
|
already = run.get("already_imported", 0)
|
|
new_imp = run.get("new_imported", 0)
|
|
if already:
|
|
lines.append(f"Finalizat: {new_imp} importate, {already} deja importate, {skipped} nemapate, {errors} erori din {total} comenzi{duration_str}")
|
|
else:
|
|
lines.append(f"Finalizat: {imported} importate, {skipped} nemapate, {errors} erori din {total} comenzi{duration_str}")
|
|
|
|
return "\n".join(lines)
|
|
|
|
|
|
@router.get("/api/sync/run/{run_id}/text-log")
async def sync_run_text_log(run_id: str):
    """Get text log for a sync run - live from memory or reconstructed from SQLite."""
    # Check in-memory first (active/recent runs)
    live_log = sync_service.get_run_text_log(run_id)
    if live_log is not None:
        current = await sync_service.get_sync_status()
        # The log is "running" only while this exact run is the active one.
        is_live = current.get("run_id") == run_id and current.get("status") == "running"
        state = "running" if is_live else "completed"
        return {"text": live_log, "status": state, "finished": state != "running"}

    # Fall back to SQLite for historical runs
    detail = await sqlite_service.get_sync_run_detail(run_id)
    if not detail:
        return {"error": "Run not found", "text": "", "status": "unknown", "finished": True}

    return {
        "text": _format_text_log_from_detail(detail),
        "status": detail.get("run", {}).get("status", "completed"),
        "finished": True,
    }
|
|
|
|
|
|
@router.get("/api/sync/run/{run_id}/orders")
async def sync_run_orders(run_id: str, status: str = "all", page: int = 1, per_page: int = 50,
                          sort_by: str = "order_date", sort_dir: str = "desc"):
    """Get filtered, paginated orders for a sync run (R1)."""
    # Filtering, sorting and pagination are all delegated to the SQLite layer.
    rows = await sqlite_service.get_run_orders_filtered(
        run_id, status, page, per_page, sort_by=sort_by, sort_dir=sort_dir)
    return rows
|
|
|
|
|
|
def _get_articole_terti_for_skus(skus: set) -> dict:
    """Query ARTICOLE_TERTI for all active codmat/cantitate/procent per SKU.

    Returns a dict mapping each found SKU to a list of mapping dicts
    (codmat, cantitate_roa, procent_pret, denumire). Runs synchronously
    against Oracle — callers should dispatch it to a thread.
    """
    # Imported lazily to avoid a module-level import cycle.
    from .. import database

    mappings = {}
    all_skus = list(skus)
    conn = database.get_oracle_connection()
    try:
        with conn.cursor() as cur:
            # Oracle caps IN-list size, so bind at most 500 SKUs per query.
            for start in range(0, len(all_skus), 500):
                chunk = all_skus[start:start + 500]
                placeholders = ",".join(f":s{j}" for j in range(len(chunk)))
                binds = {f"s{j}": value for j, value in enumerate(chunk)}
                cur.execute(f"""
                    SELECT at.sku, at.codmat, at.cantitate_roa, at.procent_pret,
                           na.denumire
                    FROM ARTICOLE_TERTI at
                    LEFT JOIN NOM_ARTICOLE na ON na.codmat = at.codmat AND na.sters = 0 AND na.inactiv = 0
                    WHERE at.sku IN ({placeholders}) AND at.activ = 1 AND at.sters = 0
                    ORDER BY at.sku, at.codmat
                """, binds)
                for sku, codmat, cant, procent, denumire in cur:
                    # Falsy NULLs fall back to the defaults used elsewhere
                    # (quantity 1, 100% price).
                    mappings.setdefault(sku, []).append({
                        "codmat": codmat,
                        "cantitate_roa": float(cant) if cant else 1,
                        "procent_pret": float(procent) if procent else 100,
                        "denumire": denumire or "",
                    })
    finally:
        # Connection comes from the shared pool; always hand it back.
        database.pool.release(conn)
    return mappings
|
|
|
|
|
|
@router.get("/api/sync/order/{order_number}")
async def order_detail(order_number: str):
    """Get order detail with line items (R9), enriched with ARTICOLE_TERTI data."""
    detail = await sqlite_service.get_order_detail(order_number)
    if not detail:
        return {"error": "Order not found"}

    # Enrich items with ARTICOLE_TERTI mappings from Oracle
    items = detail.get("items", [])
    skus = {line["sku"] for line in items if line.get("sku")}
    if skus:
        # The Oracle lookup is blocking; keep it off the event loop.
        codmat_map = await asyncio.to_thread(_get_articole_terti_for_skus, skus)
        for line in items:
            sku = line.get("sku")
            if sku and sku in codmat_map:
                line["codmat_details"] = codmat_map[sku]

    return detail
|
|
|
|
|
|
def _paginate_in_memory(result: dict, filtered: list, page: int, per_page: int) -> None:
    """Overwrite result's order list and pagination fields with an in-memory page of `filtered`."""
    total = len(filtered)
    offset = (page - 1) * per_page
    result["orders"] = filtered[offset:offset + per_page]
    result["total"] = total
    result["page"] = page
    result["per_page"] = per_page
    result["pages"] = (total + per_page - 1) // per_page if total > 0 else 0


@router.get("/api/dashboard/orders")
async def dashboard_orders(page: int = 1, per_page: int = 50,
                           search: str = "", status: str = "all",
                           sort_by: str = "order_date", sort_dir: str = "desc",
                           period_days: int = 7,
                           period_start: str = "", period_end: str = ""):
    """Get orders for dashboard, enriched with invoice data.

    period_days=0 with period_start/period_end uses custom date range.
    period_days=0 without dates means all time.

    Fixes/refactor vs. previous version:
    - shipping/billing name enrichment now runs for EVERY order (it previously
      dangled after the Oracle-fallback loop and only touched the last order);
    - the duplicated UNINVOICED/INVOICED pagination is factored into
      _paginate_in_memory.
    """
    is_uninvoiced_filter = (status == "UNINVOICED")
    is_invoiced_filter = (status == "INVOICED")
    post_filter = is_uninvoiced_filter or is_invoiced_filter

    # For UNINVOICED/INVOICED: fetch all IMPORTED orders, then filter post-invoice-check.
    # The 10000 cap is an effective "fetch everything in the period" bound.
    fetch_status = "IMPORTED" if post_filter else status
    fetch_per_page = 10000 if post_filter else per_page
    fetch_page = 1 if post_filter else page

    result = await sqlite_service.get_orders(
        page=fetch_page, per_page=fetch_per_page, search=search,
        status_filter=fetch_status, sort_by=sort_by, sort_dir=sort_dir,
        period_days=period_days,
        period_start=period_start if period_days == 0 else "",
        period_end=period_end if period_days == 0 else "",
    )

    # Enrich orders with invoice data — prefer SQLite cache, fallback to Oracle
    all_orders = result["orders"]
    for o in all_orders:
        if o.get("factura_numar"):
            # Use cached invoice data from SQLite
            o["invoice"] = {
                "facturat": True,
                "serie_act": o.get("factura_serie"),
                "numar_act": o.get("factura_numar"),
                "total_fara_tva": o.get("factura_total_fara_tva"),
                "total_tva": o.get("factura_total_tva"),
                "total_cu_tva": o.get("factura_total_cu_tva"),
            }
        else:
            o["invoice"] = None

    # For orders without cached invoice, check Oracle (only uncached imported orders)
    uncached_orders = [o for o in all_orders if o.get("id_comanda") and not o.get("invoice")]
    if uncached_orders:
        try:
            id_comanda_list = [o["id_comanda"] for o in uncached_orders]
            invoice_data = await asyncio.to_thread(
                invoice_service.check_invoices_for_orders, id_comanda_list
            )
            for o in uncached_orders:
                idc = o.get("id_comanda")
                if idc and idc in invoice_data:
                    o["invoice"] = invoice_data[idc]
        except Exception:
            # Best-effort enrichment: an unreachable Oracle must not break the dashboard.
            pass

    # Add shipping/billing name fields + is_different_person flag (every order).
    for o in all_orders:
        s_name = o.get("shipping_name") or ""
        b_name = o.get("billing_name") or ""
        o["shipping_name"] = s_name
        o["billing_name"] = b_name
        o["is_different_person"] = bool(s_name and b_name and s_name != b_name)

    # Use counts from sqlite_service (already period-scoped)
    counts = result.get("counts", {})
    # Prefer SQLite-based uninvoiced count (covers full period, not just current page)
    counts["nefacturate"] = counts.get("uninvoiced_sqlite", sum(
        1 for o in all_orders
        if o.get("status") in ("IMPORTED", "ALREADY_IMPORTED") and not o.get("invoice")
    ))
    imported_total = counts.get("imported_all") or counts.get("imported", 0)
    counts["facturate"] = max(0, imported_total - counts["nefacturate"])
    counts.setdefault("total", counts.get("imported", 0) + counts.get("skipped", 0) + counts.get("error", 0))

    # For UNINVOICED/INVOICED filters: apply server-side filtering + pagination
    if is_uninvoiced_filter:
        filtered = [o for o in all_orders
                    if o.get("status") in ("IMPORTED", "ALREADY_IMPORTED") and not o.get("invoice")]
        _paginate_in_memory(result, filtered, page, per_page)
    elif is_invoiced_filter:
        filtered = [o for o in all_orders
                    if o.get("status") in ("IMPORTED", "ALREADY_IMPORTED") and o.get("invoice")]
        _paginate_in_memory(result, filtered, page, per_page)

    # Reshape response
    return {
        "orders": result["orders"],
        "pagination": {
            "page": result.get("page", page),
            "per_page": result.get("per_page", per_page),
            "total_pages": result.get("pages", 0),
        },
        "counts": counts,
    }
|
|
|
|
|
|
@router.put("/api/sync/schedule")
async def update_schedule(config: ScheduleConfig):
    """Update scheduler configuration: apply it live, then persist it."""
    if config.enabled:
        scheduler_service.start_scheduler(config.interval_minutes)
    else:
        scheduler_service.stop_scheduler()

    # Persist config so it survives an application restart.
    for key, value in (("enabled", config.enabled),
                       ("interval_minutes", config.interval_minutes)):
        await sqlite_service.set_scheduler_config(key, str(value))

    return scheduler_service.get_scheduler_status()
|
|
|
|
|
|
@router.get("/api/sync/schedule")
async def get_schedule():
    """Return the scheduler's current status (enabled, interval, next run)."""
    return scheduler_service.get_scheduler_status()
|
|
|
|
|
|
@router.get("/api/settings")
async def get_app_settings():
    """Get application settings with defaults applied for missing keys.

    Fix: discount_vat previously defaulted to "19" here while both
    transport_vat and the AppSettingsUpdate model default to "21" —
    aligned to "21" for consistency.
    """
    settings = await sqlite_service.get_app_settings()
    # Defaults mirror the AppSettingsUpdate model.
    defaults = {
        "transport_codmat": "",
        "transport_vat": "21",
        "discount_codmat": "",
        "transport_id_pol": "",
        "discount_vat": "21",
        "discount_id_pol": "",
        "id_pol": "",
        "id_sectie": "",
    }
    return {key: settings.get(key, fallback) for key, fallback in defaults.items()}
|
|
|
|
|
|
@router.put("/api/settings")
async def update_app_settings(config: AppSettingsUpdate):
    """Persist each application setting from the request body to SQLite."""
    # Write settings one by one, in the same order as the model fields.
    for key in ("transport_codmat", "transport_vat", "discount_codmat",
                "transport_id_pol", "discount_vat", "discount_id_pol",
                "id_pol", "id_sectie"):
        await sqlite_service.set_app_setting(key, getattr(config, key))
    return {"success": True}
|