_log_order_error_history(order_number, msg) writes to logs/sync_errors_history.log via a dedicated RotatingFileHandler (100MB × 12 backups). Logger is lazy-initialised and non-propagating so it doesn't pollute the root logger. Purpose: orders.error_message is overwritten when a retry succeeds, so the history log preserves permanent audit of every malformed-order event regardless of later outcome. Helper never raises — callers are already in a degraded path. 3 unit tests: append semantics, multi-order, exception isolation. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
76 lines
2.7 KiB
Python
76 lines
2.7 KiB
Python
"""Tests for _log_order_error_history — permanent audit trail."""
|
|
import os
|
|
import sys
|
|
import logging
|
|
import logging.handlers
|
|
import pytest
|
|
|
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
|
|
|
|
from app.services import sqlite_service
|
|
|
|
|
|
@pytest.fixture
def reset_logger(tmp_path, monkeypatch):
    """Point the error-history log at a file under tmp_path so tests stay isolated.

    Yields the path of the redirected ``sync_errors_history.log`` file.
    """
    # Drop any cached logger/handlers left over from a previous test.
    sqlite_service._error_history_logger = None
    named = logging.getLogger("sync_errors_history")
    for stale in list(named.handlers):
        named.removeHandler(stale)

    log_file = tmp_path / "logs" / "sync_errors_history.log"
    log_file.parent.mkdir()

    def patched_get_logger():
        # Mirror the production lazy-init contract (same name, level,
        # propagate=False, rotation policy) but rotate into tmp_path.
        cached = sqlite_service._error_history_logger
        if cached is not None:
            return cached
        logger = logging.getLogger("sync_errors_history")
        logger.setLevel(logging.INFO)
        logger.propagate = False
        rotating = logging.handlers.RotatingFileHandler(
            str(log_file), maxBytes=100 * 1024 * 1024, backupCount=12, encoding="utf-8"
        )
        rotating.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        logger.addHandler(rotating)
        sqlite_service._error_history_logger = logger
        return logger

    monkeypatch.setattr(sqlite_service, "_get_error_history_logger", patched_get_logger)
    yield log_file
    # Teardown: forget the cached logger and release the file handles so
    # tmp_path can be cleaned up on all platforms.
    sqlite_service._error_history_logger = None
    named = logging.getLogger("sync_errors_history")
    for stale in list(named.handlers):
        stale.close()
        named.removeHandler(stale)
|
|
|
|
|
|
@pytest.mark.unit
def test_log_order_error_history_writes_line(reset_logger):
    """A single failure writes one ORDER_FAIL line containing order number and message."""
    sqlite_service._log_order_error_history("485224762", "UNIQUE constraint failed")
    # Flush only this logger's handlers. The previous logging.shutdown() call
    # closed EVERY handler in the process (including pytest's log capture and
    # any other module's handlers), silently breaking logging for the rest of
    # the test session; a targeted flush is enough to make the bytes readable.
    for handler in logging.getLogger("sync_errors_history").handlers:
        handler.flush()
    content = reset_logger.read_text(encoding="utf-8")
    assert "ORDER_FAIL 485224762" in content
    assert "UNIQUE constraint failed" in content
|
|
|
|
|
|
@pytest.mark.unit
def test_log_order_error_history_appends(reset_logger):
    """Repeated failures for the same order accumulate — the log is append-only."""
    sqlite_service._log_order_error_history("1", "err-a")
    sqlite_service._log_order_error_history("2", "err-b")
    sqlite_service._log_order_error_history("2", "err-b-retry")
    # Flush only this logger's handlers. logging.shutdown() here would close
    # every handler process-wide and disable logging for later tests in the
    # session; flushing the specific handlers is sufficient to read the file.
    for handler in logging.getLogger("sync_errors_history").handlers:
        handler.flush()
    content = reset_logger.read_text(encoding="utf-8")
    assert "ORDER_FAIL 1: err-a" in content
    assert "ORDER_FAIL 2: err-b" in content
    # Two entries for order 2 — append-only guarantee
    assert content.count("ORDER_FAIL 2:") == 2
|
|
|
|
|
|
@pytest.mark.unit
def test_log_order_error_history_swallows_errors(monkeypatch):
    """Callable must never raise — caller is already in a degraded path."""

    def exploding_get_logger():
        raise RuntimeError("disk full")

    monkeypatch.setattr(
        sqlite_service, "_get_error_history_logger", exploding_get_logger
    )
    # Even with a broken logger factory, the helper must return silently.
    sqlite_service._log_order_error_history("X", "ignored")
|