fix(import): resolve duplicate article + order_items repopulation on retry
Two production bugs from VENDING (order 485224762, 2026-04-22): 1. Oracle: ORA-20000 when a GoMag order contains a kit SKU whose expansion includes CODMAT X plus a second item with SKU=X. Two article-insert call-sites in PACK_IMPORT_COMENZI bypassed merge_or_insert_articol — line 622 (NOM_ARTICOLE fallback) and line 538 (kit discount line). Both now use merge_or_insert_articol for consistent dedup semantics. Regression test added in test_complete_import.py covering the exact kit-plus-direct scenario. 2. SQLite: retry_service._download_and_reimport refreshed orders row but never repopulated order_items. Combined with mark_order_deleted_in_roa (which wipes items), any retry/resync left the UI showing "Niciun articol" despite successful Oracle import. Retry now rebuilds items from the fresh GoMag download on both success and error paths, mirroring sync_service. Includes scripts/backfill_order_items.py — one-shot recovery for orders already in this bad state. Reads settings, re-fetches from GoMag, rewrites order_items without touching Oracle or order status. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -13,7 +13,7 @@ async def _download_and_reimport(order_number: str, order_date_str: str, custome
|
||||
Does NOT check status guard — caller is responsible.
|
||||
Returns: {"success": bool, "message": str, "status": str|None}
|
||||
"""
|
||||
from . import sqlite_service, gomag_client, import_service, order_reader
|
||||
from . import sqlite_service, gomag_client, import_service, order_reader, validation_service
|
||||
|
||||
# Parse order date for narrow download window
|
||||
try:
|
||||
@@ -75,6 +75,28 @@ async def _download_and_reimport(order_number: str, order_date_str: str, custome
|
||||
)
|
||||
return {"success": False, "message": f"Eroare import: {e}"}
|
||||
|
||||
# Build order_items data from fresh GoMag download (mirrors sync_service:882-891).
|
||||
# Resolves ARTICOLE_TERTI mapping so UI shows mapped/direct badge.
|
||||
try:
|
||||
skus = {item.sku for item in target_order.items if item.sku}
|
||||
validation = await asyncio.to_thread(
|
||||
validation_service.validate_skus, skus, None, id_gestiuni
|
||||
) if skus else {"mapped": set(), "direct": set()}
|
||||
except Exception as e:
|
||||
logger.warning(f"Retry: validate_skus failed for {order_number}, defaulting mapping_status=direct: {e}")
|
||||
validation = {"mapped": set(), "direct": set()}
|
||||
|
||||
order_items_data = [
|
||||
{
|
||||
"sku": item.sku, "product_name": item.name,
|
||||
"quantity": item.quantity, "price": item.price,
|
||||
"baseprice": item.baseprice, "vat": item.vat,
|
||||
"mapping_status": "mapped" if item.sku in validation["mapped"] else "direct",
|
||||
"codmat": None, "id_articol": None, "cantitate_roa": None,
|
||||
}
|
||||
for item in target_order.items
|
||||
]
|
||||
|
||||
if result.get("success"):
|
||||
await sqlite_service.upsert_order(
|
||||
sync_run_id="retry",
|
||||
@@ -92,7 +114,8 @@ async def _download_and_reimport(order_number: str, order_date_str: str, custome
|
||||
id_adresa_facturare=result.get("id_adresa_facturare"),
|
||||
id_adresa_livrare=result.get("id_adresa_livrare"),
|
||||
)
|
||||
logger.info(f"Retry successful for order {order_number} → IMPORTED")
|
||||
await sqlite_service.add_order_items(order_number, order_items_data)
|
||||
logger.info(f"Retry successful for order {order_number} → IMPORTED ({len(order_items_data)} items)")
|
||||
return {"success": True, "message": "Comanda reimportata cu succes", "status": "IMPORTED"}
|
||||
else:
|
||||
error = result.get("error", "Unknown error")
|
||||
@@ -104,6 +127,7 @@ async def _download_and_reimport(order_number: str, order_date_str: str, custome
|
||||
status="ERROR",
|
||||
error_message=f"Retry: {error}",
|
||||
)
|
||||
await sqlite_service.add_order_items(order_number, order_items_data)
|
||||
return {"success": False, "message": f"Import esuat: {error}", "status": "ERROR"}
|
||||
|
||||
|
||||
|
||||
@@ -50,6 +50,7 @@ CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_COMENZI AS
|
||||
-- 21.03.2026 - fix discount amount: v_disc_amt e per-kit, nu se imparte la v_cantitate_web
|
||||
-- 25.03.2026 - skip negative kit discount (markup), ROUND prices to nzecimale_pretv
|
||||
-- 25.03.2026 - kit discount inserat per-kit sub componente (nu deferred cross-kit)
|
||||
-- 22.04.2026 - fix duplicate article: NOM_ARTICOLE fallback si kit discount line folosesc merge_or_insert_articol (prod VENDING comanda 485224762)
|
||||
-- ====================================================================
|
||||
|
||||
-- Constante pentru configurare
|
||||
@@ -535,15 +536,15 @@ CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_COMENZI AS
|
||||
|
||||
IF v_disc_amt > 0 THEN
|
||||
BEGIN
|
||||
PACK_COMENZI.adauga_articol_comanda(
|
||||
V_ID_COMANDA => v_id_comanda,
|
||||
V_ID_ARTICOL => v_disc_artid,
|
||||
V_ID_POL => NVL(p_kit_discount_id_pol, p_id_pol),
|
||||
V_CANTITATE => -1 * v_cantitate_web,
|
||||
V_PRET => ROUND(v_disc_amt, v_nzec_pretv),
|
||||
V_ID_UTIL => c_id_util,
|
||||
V_ID_SECTIE => p_id_sectie,
|
||||
V_PTVA => v_vat_key);
|
||||
merge_or_insert_articol(
|
||||
p_id_comanda => v_id_comanda,
|
||||
p_id_articol => v_disc_artid,
|
||||
p_id_pol => NVL(p_kit_discount_id_pol, p_id_pol),
|
||||
p_cantitate => -1 * v_cantitate_web,
|
||||
p_pret => ROUND(v_disc_amt, v_nzec_pretv),
|
||||
p_id_util => c_id_util,
|
||||
p_id_sectie => p_id_sectie,
|
||||
p_ptva => v_vat_key);
|
||||
v_articole_procesate := v_articole_procesate + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
@@ -619,14 +620,14 @@ CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_COMENZI AS
|
||||
v_pret_unitar := NVL(v_pret_web, 0);
|
||||
|
||||
BEGIN
|
||||
PACK_COMENZI.adauga_articol_comanda(V_ID_COMANDA => v_id_comanda,
|
||||
V_ID_ARTICOL => v_id_articol,
|
||||
V_ID_POL => NVL(v_id_pol_articol, p_id_pol),
|
||||
V_CANTITATE => v_cantitate_web,
|
||||
V_PRET => v_pret_unitar,
|
||||
V_ID_UTIL => c_id_util,
|
||||
V_ID_SECTIE => p_id_sectie,
|
||||
V_PTVA => v_vat);
|
||||
merge_or_insert_articol(p_id_comanda => v_id_comanda,
|
||||
p_id_articol => v_id_articol,
|
||||
p_id_pol => NVL(v_id_pol_articol, p_id_pol),
|
||||
p_cantitate => v_cantitate_web,
|
||||
p_pret => v_pret_unitar,
|
||||
p_id_util => c_id_util,
|
||||
p_id_sectie => p_id_sectie,
|
||||
p_ptva => v_vat);
|
||||
v_articole_procesate := v_articole_procesate + 1;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
|
||||
@@ -901,6 +901,98 @@ def test_duplicate_codmat_different_prices():
|
||||
return False
|
||||
|
||||
|
||||
def test_kit_component_plus_direct_sku_merge():
    """Regression (prod VENDING 2026-04-22, order 485224762):
    Kit SKU expanding to CODMAT X + direct SKU = X in same order used to crash
    with ORA-20000 because NOM_ARTICOLE fallback bypassed merge_or_insert_articol.

    After fix, both inserts succeed (merged if price/vat match, separate rows otherwise).

    Requires a live Oracle connection (module-level user/password/dsn) and the
    shared setup_test_data/teardown_test_data fixtures from this file.
    Returns True on pass, False on failure (runner convention in this suite).
    """
    print("\n" + "=" * 60)
    print("TEST: Kit component + direct SKU same CODMAT (no ORA-20000)")
    print("=" * 60)

    try:
        with oracledb.connect(user=user, password=password, dsn=dsn) as conn:
            with conn.cursor() as cur:
                # Unique suffix so concurrent runs don't collide on order numbers.
                suffix = f'{datetime.now().strftime("%H%M%S")}-{random.randint(1000, 9999)}'
                setup_test_data(cur)
                partner_id = _create_test_partner(cur, suffix)

                # Add extra kit mapping: KIT_DUP expands to CAF01 (2 units) + LAV001 (1 unit)
                # CAF01 is also importable as direct SKU (CODMAT match in NOM_ARTICOLE)
                cur.execute("DELETE FROM ARTICOLE_TERTI WHERE sku = 'KIT_DUP'")
                cur.execute("""INSERT INTO ARTICOLE_TERTI (sku, codmat, cantitate_roa, procent_pret, activ)
                               VALUES ('KIT_DUP', 'CAF01', 2, 100, 1)""")
                cur.execute("""INSERT INTO ARTICOLE_TERTI (sku, codmat, cantitate_roa, procent_pret, activ)
                               VALUES ('KIT_DUP', 'LAV001', 1, 100, 1)""")
                # Price for LAV001 in id_pol=1 (CAF01 already set by setup_test_data)
                cur.execute("""MERGE INTO crm_politici_pret_art dst
                               USING (SELECT 1 AS id_pol, 9999002 AS id_articol FROM DUAL) src
                               ON (dst.id_pol = src.id_pol AND dst.id_articol = src.id_articol)
                               WHEN NOT MATCHED THEN INSERT (id_pol, id_articol, pret, proc_tvav)
                               VALUES (src.id_pol, src.id_articol, 30.00, 19)""")
                conn.commit()

                try:
                    # Order contains BOTH the kit (which expands to CAF01 + LAV001)
                    # AND direct SKU 'CAF01' — the exact production bug scenario.
                    articles_json = (
                        '[{"sku": "KIT_DUP", "quantity": "1", "price": "200", "vat": "19"},'
                        ' {"sku": "CAF01", "quantity": "3", "price": "50", "vat": "19"}]'
                    )
                    order_id = _import_order(
                        cur, f'TEST-BIZ-KITDUP-{suffix}', partner_id, articles_json
                    )

                    # Pre-fix: order_id was 0/NULL because RAISE_APPLICATION_ERROR
                    # rolled back the transaction. Post-fix: order is created successfully.
                    assert order_id and order_id > 0, (
                        f"REGRESSION: order import failed (id={order_id}). "
                        "Line 622 NOM_ARTICOLE fallback likely still bypasses merge_or_insert_articol."
                    )

                    rows = _get_order_lines(cur, order_id)
                    print(f" Order {order_id}, {len(rows)} lines:")
                    for r in rows:
                        print(f" qty={r[0]}, pret={r[1]:.2f}, codmat={r[2]}, ptva={r[3]}")

                    # CAF01 must appear with summed quantity (kit: 2x1=2) + (direct: 3) = 5
                    # when price+vat align, OR as multiple rows summing to 5 when they don't.
                    caf_positive = [r for r in rows if r[2] == 'CAF01' and r[0] > 0]
                    total_caf_qty = sum(r[0] for r in caf_positive)
                    assert len(caf_positive) >= 1, \
                        f"Expected >=1 positive CAF01 line, got {len(caf_positive)}"
                    assert total_caf_qty == 5, \
                        f"Expected total CAF01 qty=5 (2 from kit + 3 direct), got {total_caf_qty}"
                    print(f" PASS: CAF01 qty={total_caf_qty} across {len(caf_positive)} line(s), no ORA-20000")

                    conn.commit()
                finally:
                    # Always remove the KIT_DUP mapping + price row so reruns start clean,
                    # even when the assertions above fail.
                    cur.execute("DELETE FROM ARTICOLE_TERTI WHERE sku = 'KIT_DUP'")
                    cur.execute("DELETE FROM crm_politici_pret_art WHERE id_articol = 9999002 AND id_pol = 1")
                    conn.commit()

                teardown_test_data(cur)
                conn.commit()
                return True

    except Exception as e:
        print(f" FAIL: {e}")
        import traceback
        traceback.print_exc()
        # Best-effort cleanup on a fresh connection (the failed one may be unusable);
        # bare except is deliberate here — cleanup must never mask the real failure.
        try:
            with oracledb.connect(user=user, password=password, dsn=dsn) as conn:
                with conn.cursor() as cur:
                    cur.execute("DELETE FROM ARTICOLE_TERTI WHERE sku = 'KIT_DUP'")
                    cur.execute("DELETE FROM crm_politici_pret_art WHERE id_articol = 9999002 AND id_pol = 1")
                    teardown_test_data(cur)
                    conn.commit()
        except:
            pass
        return False
|
||||
|
||||
|
||||
def test_pf_reverse_name_dedup():
|
||||
"""Test that PF partner with reversed name order is found, not duplicated.
|
||||
Creates partner 'POPESCU ION', then searches for 'ION POPESCU' — should return same id_part.
|
||||
@@ -1017,6 +1109,7 @@ if __name__ == "__main__":
|
||||
("Markup no negative discount", test_kit_markup_no_negative_discount),
|
||||
("Component price=0 import", test_kit_component_price_zero_import),
|
||||
("Duplicate CODMAT different prices", test_duplicate_codmat_different_prices),
|
||||
("Kit component + direct SKU same CODMAT", test_kit_component_plus_direct_sku_merge),
|
||||
]
|
||||
biz_passed = 0
|
||||
for name, test_fn in biz_tests:
|
||||
|
||||
scripts/backfill_order_items.py — new file, 115 lines
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
|
||||
"""One-shot recovery: re-populate SQLite `order_items` for orders where the
|
||||
table was wiped (e.g. DELETED_IN_ROA → retry flow, before the retry items fix).
|
||||
|
||||
Reads settings from SQLite, downloads orders from GoMag API for a ~14-day window
|
||||
around the order date, finds the target order, rebuilds the items rows.
|
||||
|
||||
Does NOT touch Oracle. Does NOT change order status / id_comanda.
|
||||
|
||||
Usage (inside the venv, on the prod server):
|
||||
python scripts/backfill_order_items.py 485224762
|
||||
python scripts/backfill_order_items.py 485224762 485224763 # multiple
|
||||
"""
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
from api.app.services import sqlite_service, gomag_client, order_reader, validation_service
|
||||
from api.app import database
|
||||
|
||||
|
||||
async def _backfill_one(order_number: str, app_settings: dict, use_oracle: bool) -> dict:
    """Re-download one order from GoMag and rewrite its SQLite order_items rows.

    Args:
        order_number: GoMag order number as stored in SQLite.
        app_settings: settings dict from sqlite_service (GoMag API key/shop, id_gestiune).
        use_oracle: when True, SKUs are validated against Oracle so mapping_status
            is computed; when False everything defaults to "direct".

    Returns:
        {"ok": bool, "msg": str} — msg is a human-readable (Romanian) summary line.

    Does NOT touch Oracle order data and does NOT change order status/id_comanda.
    """
    detail = await sqlite_service.get_order_detail(order_number)
    if not detail:
        return {"ok": False, "msg": f"#{order_number}: nu e in SQLite"}

    order_data = detail["order"]
    existing_items = len(detail["items"])
    order_date_str = order_data.get("order_date") or ""

    # Parse the stored ISO order date ("Z" suffix normalized to an explicit
    # UTC offset); fall back to yesterday so the download window below still
    # covers recent orders when the date is missing or malformed.
    try:
        order_date = datetime.fromisoformat(order_date_str.replace("Z", "+00:00")).date()
    except (ValueError, AttributeError):
        order_date = datetime.now().date() - timedelta(days=1)

    # Window from the order date to now, +2 days slack; never less than 2 days.
    days_back = max((datetime.now().date() - order_date).days + 2, 2)

    with tempfile.TemporaryDirectory() as tmp:
        await gomag_client.download_orders(
            tmp,
            days_back=days_back,
            api_key=app_settings.get("gomag_api_key"),
            api_shop=app_settings.get("gomag_api_shop"),
            limit=200,
        )
        orders, _ = order_reader.read_json_orders(json_dir=tmp)
        # str() on both sides: SQLite stores the number as text, GoMag may not.
        target = next((o for o in orders if str(o.number) == str(order_number)), None)

    if not target:
        return {"ok": False, "msg": f"#{order_number}: nu e in GoMag (fereastra {days_back}z)"}

    # Default: everything "direct" unless Oracle validation succeeds below.
    validation = {"mapped": set(), "direct": set()}
    if use_oracle:
        skus = {item.sku for item in target.items if item.sku}
        # id_gestiune setting is a comma-separated list of ints (may be empty).
        id_gestiune = app_settings.get("id_gestiune", "")
        id_gestiuni = [int(g.strip()) for g in id_gestiune.split(",") if g.strip()] if id_gestiune else None
        try:
            # validate_skus is sync/blocking → run in a thread off the event loop.
            validation = await asyncio.to_thread(
                validation_service.validate_skus, skus, None, id_gestiuni
            )
        except Exception as e:
            print(f" [WARN] validate_skus a esuat, mapping_status default='direct': {e}")

    # Same row shape the sync flow writes; Oracle-derived columns stay NULL
    # because this script deliberately never imports into Oracle.
    items_data = [
        {
            "sku": item.sku, "product_name": item.name,
            "quantity": item.quantity, "price": item.price,
            "baseprice": item.baseprice, "vat": item.vat,
            "mapping_status": "mapped" if item.sku in validation["mapped"] else "direct",
            "codmat": None, "id_articol": None, "cantitate_roa": None,
        }
        for item in target.items
    ]

    await sqlite_service.add_order_items(order_number, items_data)
    return {
        "ok": True,
        "msg": f"#{order_number}: {len(items_data)} items scrise (era {existing_items})",
    }
|
||||
|
||||
|
||||
async def main(order_numbers: list[str]):
    """Backfill order_items for every given order number.

    Loads app settings once, opens Oracle opportunistically (mapping_status
    falls back to 'direct' when unavailable), and prints one OK/FAIL line
    per order. Oracle is always closed on the way out.
    """
    settings = await sqlite_service.get_app_settings()

    oracle_ready = False
    try:
        database.init_oracle()
        oracle_ready = True
        print("Oracle conectat — mapping_status va fi calculat corect.")
    except Exception as e:
        print(f"Oracle indisponibil ({e}) — mapping_status default 'direct'.")

    try:
        for number in order_numbers:
            outcome = await _backfill_one(number, settings, oracle_ready)
            tag = "OK " if outcome["ok"] else "FAIL"
            print(f"[{tag}] {outcome['msg']}")
    finally:
        if oracle_ready:
            try:
                database.close_oracle()
            except Exception:
                pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: require at least one GoMag order number.
    cli_args = sys.argv[1:]
    if not cli_args:
        print("Usage: python scripts/backfill_order_items.py <order_number> [<order_number>...]")
        sys.exit(1)

    asyncio.run(main(cli_args))
|
||||
Reference in New Issue
Block a user