fix(dashboard): update sync card after completion + use Bucharest timezone
Sync card was showing previous run data after sync completed because the
last_run query excluded the current run_id even after it finished. Now only
excludes during active running state.
All `datetime.now()` calls and SQLite `datetime('now')` expressions now use the
Europe/Bucharest timezone, fixing times that displayed 2 hours behind (previously UTC).
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1,8 +1,16 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
from ..database import get_sqlite, get_sqlite_sync
|
||||
|
||||
_tz_bucharest = ZoneInfo("Europe/Bucharest")
|
||||
|
||||
|
||||
def _now_str():
|
||||
"""Return current Bucharest time as ISO string."""
|
||||
return datetime.now(_tz_bucharest).replace(tzinfo=None).isoformat()
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -12,8 +20,8 @@ async def create_sync_run(run_id: str, json_files: int = 0):
|
||||
try:
|
||||
await db.execute("""
|
||||
INSERT INTO sync_runs (run_id, started_at, status, json_files)
|
||||
VALUES (?, datetime('now'), 'running', ?)
|
||||
""", (run_id, json_files))
|
||||
VALUES (?, ?, 'running', ?)
|
||||
""", (run_id, _now_str(), json_files))
|
||||
await db.commit()
|
||||
finally:
|
||||
await db.close()
|
||||
@@ -28,7 +36,7 @@ async def update_sync_run(run_id: str, status: str, total_orders: int = 0,
|
||||
try:
|
||||
await db.execute("""
|
||||
UPDATE sync_runs SET
|
||||
finished_at = datetime('now'),
|
||||
finished_at = ?,
|
||||
status = ?,
|
||||
total_orders = ?,
|
||||
imported = ?,
|
||||
@@ -38,7 +46,7 @@ async def update_sync_run(run_id: str, status: str, total_orders: int = 0,
|
||||
already_imported = ?,
|
||||
new_imported = ?
|
||||
WHERE run_id = ?
|
||||
""", (status, total_orders, imported, skipped, errors, error_message,
|
||||
""", (_now_str(), status, total_orders, imported, skipped, errors, error_message,
|
||||
already_imported, new_imported, run_id))
|
||||
await db.commit()
|
||||
finally:
|
||||
|
||||
@@ -3,6 +3,14 @@ import json
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
_tz_bucharest = ZoneInfo("Europe/Bucharest")
|
||||
|
||||
|
||||
def _now():
|
||||
"""Return current time in Bucharest timezone (naive, for display/storage)."""
|
||||
return datetime.now(_tz_bucharest).replace(tzinfo=None)
|
||||
|
||||
from . import order_reader, validation_service, import_service, sqlite_service, invoice_service, gomag_client
|
||||
from ..config import settings
|
||||
@@ -22,7 +30,7 @@ def _log_line(run_id: str, message: str):
|
||||
"""Append a timestamped line to the in-memory log buffer."""
|
||||
if run_id not in _run_logs:
|
||||
_run_logs[run_id] = []
|
||||
ts = datetime.now().strftime("%H:%M:%S")
|
||||
ts = _now().strftime("%H:%M:%S")
|
||||
_run_logs[run_id].append(f"[{ts}] {message}")
|
||||
|
||||
|
||||
@@ -62,11 +70,11 @@ async def prepare_sync(id_pol: int = None, id_sectie: int = None) -> dict:
|
||||
if _sync_lock.locked():
|
||||
return {"error": "Sync already running", "run_id": _current_sync.get("run_id") if _current_sync else None}
|
||||
|
||||
run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
run_id = _now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
_current_sync = {
|
||||
"run_id": run_id,
|
||||
"status": "running",
|
||||
"started_at": datetime.now().isoformat(),
|
||||
"started_at": _now().isoformat(),
|
||||
"finished_at": None,
|
||||
"phase": "starting",
|
||||
"phase_text": "Starting...",
|
||||
@@ -142,11 +150,11 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
async with _sync_lock:
|
||||
# Use provided run_id or generate one
|
||||
if not run_id:
|
||||
run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
run_id = _now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
|
||||
_current_sync = {
|
||||
"run_id": run_id,
|
||||
"status": "running",
|
||||
"started_at": datetime.now().isoformat(),
|
||||
"started_at": _now().isoformat(),
|
||||
"finished_at": None,
|
||||
"phase": "reading",
|
||||
"phase_text": "Reading JSON files...",
|
||||
@@ -157,7 +165,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
|
||||
_update_progress("reading", "Reading JSON files...")
|
||||
|
||||
started_dt = datetime.now()
|
||||
started_dt = _now()
|
||||
_run_logs[run_id] = [
|
||||
f"=== Sync Run {run_id} ===",
|
||||
f"Inceput: {started_dt.strftime('%d.%m.%Y %H:%M:%S')}",
|
||||
@@ -322,9 +330,9 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
try:
|
||||
min_date = datetime.strptime(min_date_str[:10], "%Y-%m-%d") - timedelta(days=1)
|
||||
except (ValueError, TypeError):
|
||||
min_date = datetime.now() - timedelta(days=90)
|
||||
min_date = _now() - timedelta(days=90)
|
||||
else:
|
||||
min_date = datetime.now() - timedelta(days=90)
|
||||
min_date = _now() - timedelta(days=90)
|
||||
|
||||
existing_map = await asyncio.to_thread(
|
||||
validation_service.check_orders_in_roa, min_date, conn
|
||||
@@ -673,14 +681,14 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
"already_imported": already_imported_count, "cancelled": cancelled_count})
|
||||
if _current_sync:
|
||||
_current_sync["status"] = status
|
||||
_current_sync["finished_at"] = datetime.now().isoformat()
|
||||
_current_sync["finished_at"] = _now().isoformat()
|
||||
|
||||
logger.info(
|
||||
f"Sync {run_id} completed: {imported_count} new, {already_imported_count} already imported, "
|
||||
f"{len(skipped)} skipped, {error_count} errors, {cancelled_count} cancelled"
|
||||
)
|
||||
|
||||
duration = (datetime.now() - started_dt).total_seconds()
|
||||
duration = (_now() - started_dt).total_seconds()
|
||||
_log_line(run_id, "")
|
||||
cancelled_text = f", {cancelled_count} anulate" if cancelled_count else ""
|
||||
_run_logs[run_id].append(
|
||||
@@ -696,7 +704,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
await sqlite_service.update_sync_run(run_id, "failed", 0, 0, 0, 1, error_message=str(e))
|
||||
if _current_sync:
|
||||
_current_sync["status"] = "failed"
|
||||
_current_sync["finished_at"] = datetime.now().isoformat()
|
||||
_current_sync["finished_at"] = _now().isoformat()
|
||||
_current_sync["error"] = str(e)
|
||||
return {"run_id": run_id, "status": "failed", "error": str(e)}
|
||||
finally:
|
||||
|
||||
Reference in New Issue
Block a user