feat(pricing): kit/pachet pricing with price list lookup, replace procent_pret
- Oracle PL/SQL: kit pricing logic with Mode A (distributed discount) and
Mode B (separate discount line), dual policy support, PRETURI_CU_TVA flag
- Eliminate procent_pret from entire stack (Oracle, Python, JS, HTML)
- New settings: kit_pricing_mode, kit_discount_codmat, price_sync_enabled
- Settings UI: cards for Kit Pricing and Price Sync configuration
- Mappings UI: kit badges with lazy-loaded component prices from price list
- Price sync from orders: auto-update ROA prices when web prices differ
- Catalog price sync: new service to sync all GoMag product prices to ROA
- Kit component price validation: pre-check prices before import
- New endpoint GET /api/mappings/{sku}/prices for component price display
- New endpoints POST /api/price-sync/start, GET status, GET history
- DDL script 07_drop_procent_pret.sql (run after deploy confirmation)
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -101,3 +101,77 @@ async def download_orders(
|
||||
await asyncio.sleep(1)
|
||||
|
||||
return {"pages": total_pages, "total": total_orders, "files": saved_files}
|
||||
|
||||
|
||||
async def download_products(
    api_key: str = None,
    api_shop: str = None,
    products_url: str = None,
    log_fn: Callable[[str], None] = None,
) -> list[dict]:
    """Fetch the full GoMag product catalog, page by page.

    Each returned dict carries: sku, price, vat, vat_included, bundleItems.
    Credentials fall back to the configured settings when not passed in;
    an empty list is returned when credentials are missing or the first
    request fails.
    """
    def _log(msg: str):
        # Mirror every message to the module logger and the optional callback.
        logger.info(msg)
        if log_fn:
            log_fn(msg)

    key = api_key or settings.GOMAG_API_KEY
    shop = api_shop or settings.GOMAG_API_SHOP
    url = products_url or "https://api.gomag.ro/api/v1/product/read/json"

    if not key or not shop:
        _log("GoMag API keys neconfigurați, skip product download")
        return []

    request_headers = {
        "Apikey": key,
        "ApiShop": shop,
        "User-Agent": "Mozilla/5.0",
        "Content-Type": "application/json",
    }

    collected: list[dict] = []
    pages_total = 1

    async with httpx.AsyncClient(timeout=30) as client:
        current_page = 1
        while current_page <= pages_total:
            try:
                response = await client.get(
                    url,
                    headers=request_headers,
                    params={"page": current_page, "limit": 100},
                )
                response.raise_for_status()
                payload = response.json()
            except httpx.HTTPError as e:
                _log(f"GoMag Products API eroare pagina {current_page}: {e}")
                break
            except Exception as e:
                _log(f"GoMag Products eroare neașteptată pagina {current_page}: {e}")
                break

            if current_page == 1:
                # The first response announces the total page count.
                pages_total = int(payload.get("pages", 1))
                _log(f"GoMag Products: {payload.get('total', '?')} produse în {pages_total} pagini")

            batch = payload.get("products", [])
            if isinstance(batch, dict):
                # A single-product page comes back as a bare dict.
                batch = [batch]
            if isinstance(batch, list):
                collected.extend(
                    {
                        "sku": item["sku"],
                        "price": item.get("price", "0"),
                        "vat": item.get("vat", "19"),
                        "vat_included": item.get("vat_included", "1"),
                        "bundleItems": item.get("bundleItems", []),
                    }
                    for item in batch
                    if isinstance(item, dict) and item.get("sku")
                )

            current_page += 1
            if current_page <= pages_total:
                # Throttle between pages to be polite to the API.
                await asyncio.sleep(1)

    _log(f"GoMag Products: {len(collected)} produse cu SKU descărcate")
    return collected
|
||||
|
||||
@@ -342,6 +342,12 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None, app_se
|
||||
# Convert list[int] to CSV string for Oracle VARCHAR2 param
|
||||
id_gestiune_csv = ",".join(str(g) for g in id_gestiuni) if id_gestiuni else None
|
||||
|
||||
# Kit pricing parameters from settings
|
||||
kit_mode = (app_settings or {}).get("kit_pricing_mode") or None
|
||||
kit_id_pol_prod = int((app_settings or {}).get("id_pol_productie") or 0) or None
|
||||
kit_discount_codmat = (app_settings or {}).get("kit_discount_codmat") or None
|
||||
kit_discount_id_pol = int((app_settings or {}).get("kit_discount_id_pol") or 0) or None
|
||||
|
||||
cur.callproc("PACK_IMPORT_COMENZI.importa_comanda", [
|
||||
order_number, # p_nr_comanda_ext
|
||||
order_date, # p_data_comanda
|
||||
@@ -352,7 +358,11 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None, app_se
|
||||
id_pol, # p_id_pol
|
||||
id_sectie, # p_id_sectie
|
||||
id_gestiune_csv, # p_id_gestiune (CSV string)
|
||||
id_comanda # v_id_comanda (OUT)
|
||||
kit_mode, # p_kit_mode
|
||||
kit_id_pol_prod, # p_id_pol_productie
|
||||
kit_discount_codmat, # p_kit_discount_codmat
|
||||
kit_discount_id_pol, # p_kit_discount_id_pol
|
||||
id_comanda # v_id_comanda (OUT) — MUST STAY LAST
|
||||
])
|
||||
|
||||
comanda_id = id_comanda.getvalue()
|
||||
|
||||
@@ -9,14 +9,8 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
def get_mappings(search: str = "", page: int = 1, per_page: int = 50,
|
||||
sort_by: str = "sku", sort_dir: str = "asc",
|
||||
show_deleted: bool = False, pct_filter: str = None):
|
||||
"""Get paginated mappings with optional search, sorting, and pct_filter.
|
||||
|
||||
pct_filter values:
|
||||
'complete' – only SKU groups where sum(procent_pret for active rows) == 100
|
||||
'incomplete' – only SKU groups where sum < 100
|
||||
None / 'all' – no filter
|
||||
"""
|
||||
show_deleted: bool = False):
|
||||
"""Get paginated mappings with optional search and sorting."""
|
||||
if database.pool is None:
|
||||
raise HTTPException(status_code=503, detail="Oracle unavailable")
|
||||
|
||||
@@ -29,7 +23,6 @@ def get_mappings(search: str = "", page: int = 1, per_page: int = 50,
|
||||
"denumire": "na.denumire",
|
||||
"um": "na.um",
|
||||
"cantitate_roa": "at.cantitate_roa",
|
||||
"procent_pret": "at.procent_pret",
|
||||
"activ": "at.activ",
|
||||
}
|
||||
sort_col = allowed_sort.get(sort_by, "at.sku")
|
||||
@@ -58,7 +51,7 @@ def get_mappings(search: str = "", page: int = 1, per_page: int = 50,
|
||||
# Fetch ALL matching rows (no pagination yet — we need to group by SKU first)
|
||||
data_sql = f"""
|
||||
SELECT at.sku, at.codmat, na.denumire, na.um, at.cantitate_roa,
|
||||
at.procent_pret, at.activ, at.sters,
|
||||
at.activ, at.sters,
|
||||
TO_CHAR(at.data_creare, 'YYYY-MM-DD HH24:MI') as data_creare
|
||||
FROM ARTICOLE_TERTI at
|
||||
LEFT JOIN nom_articole na ON na.codmat = at.codmat
|
||||
@@ -69,7 +62,7 @@ def get_mappings(search: str = "", page: int = 1, per_page: int = 50,
|
||||
columns = [col[0].lower() for col in cur.description]
|
||||
all_rows = [dict(zip(columns, row)) for row in cur.fetchall()]
|
||||
|
||||
# Group by SKU and compute pct_total for each group
|
||||
# Group by SKU
|
||||
from collections import OrderedDict
|
||||
groups = OrderedDict()
|
||||
for row in all_rows:
|
||||
@@ -78,64 +71,13 @@ def get_mappings(search: str = "", page: int = 1, per_page: int = 50,
|
||||
groups[sku] = []
|
||||
groups[sku].append(row)
|
||||
|
||||
# Compute counts across ALL groups (before pct_filter)
|
||||
total_skus = len(groups)
|
||||
complete_skus = 0
|
||||
incomplete_skus = 0
|
||||
for sku, rows in groups.items():
|
||||
pct_total = sum(
|
||||
(r["procent_pret"] or 0)
|
||||
for r in rows
|
||||
if r.get("activ") == 1
|
||||
)
|
||||
if abs(pct_total - 100) <= 0.01:
|
||||
complete_skus += 1
|
||||
else:
|
||||
incomplete_skus += 1
|
||||
|
||||
counts = {
|
||||
"total": total_skus,
|
||||
"complete": complete_skus,
|
||||
"incomplete": incomplete_skus,
|
||||
}
|
||||
|
||||
# Apply pct_filter
|
||||
if pct_filter in ("complete", "incomplete"):
|
||||
filtered_groups = {}
|
||||
for sku, rows in groups.items():
|
||||
pct_total = sum(
|
||||
(r["procent_pret"] or 0)
|
||||
for r in rows
|
||||
if r.get("activ") == 1
|
||||
)
|
||||
is_complete = abs(pct_total - 100) <= 0.01
|
||||
if pct_filter == "complete" and is_complete:
|
||||
filtered_groups[sku] = rows
|
||||
elif pct_filter == "incomplete" and not is_complete:
|
||||
filtered_groups[sku] = rows
|
||||
groups = filtered_groups
|
||||
counts = {"total": len(groups)}
|
||||
|
||||
# Flatten back to rows for pagination (paginate by raw row count)
|
||||
filtered_rows = [row for rows in groups.values() for row in rows]
|
||||
total = len(filtered_rows)
|
||||
page_rows = filtered_rows[offset: offset + per_page]
|
||||
|
||||
# Attach pct_total and is_complete to each row for the renderer
|
||||
# Re-compute per visible group
|
||||
sku_pct = {}
|
||||
for sku, rows in groups.items():
|
||||
pct_total = sum(
|
||||
(r["procent_pret"] or 0)
|
||||
for r in rows
|
||||
if r.get("activ") == 1
|
||||
)
|
||||
sku_pct[sku] = {"pct_total": pct_total, "is_complete": abs(pct_total - 100) <= 0.01}
|
||||
|
||||
for row in page_rows:
|
||||
meta = sku_pct.get(row["sku"], {"pct_total": 0, "is_complete": False})
|
||||
row["pct_total"] = meta["pct_total"]
|
||||
row["is_complete"] = meta["is_complete"]
|
||||
|
||||
return {
|
||||
"mappings": page_rows,
|
||||
"total": total,
|
||||
@@ -145,7 +87,7 @@ def get_mappings(search: str = "", page: int = 1, per_page: int = 50,
|
||||
"counts": counts,
|
||||
}
|
||||
|
||||
def create_mapping(sku: str, codmat: str, cantitate_roa: float = 1, procent_pret: float = 100, auto_restore: bool = False):
|
||||
def create_mapping(sku: str, codmat: str, cantitate_roa: float = 1, auto_restore: bool = False):
|
||||
"""Create a new mapping. Returns dict or raises HTTPException on duplicate.
|
||||
|
||||
When auto_restore=True, soft-deleted records are restored+updated instead of raising 409.
|
||||
@@ -194,11 +136,10 @@ def create_mapping(sku: str, codmat: str, cantitate_roa: float = 1, procent_pret
|
||||
if auto_restore:
|
||||
cur.execute("""
|
||||
UPDATE ARTICOLE_TERTI SET sters = 0, activ = 1,
|
||||
cantitate_roa = :cantitate_roa, procent_pret = :procent_pret,
|
||||
cantitate_roa = :cantitate_roa,
|
||||
data_modif = SYSDATE
|
||||
WHERE sku = :sku AND codmat = :codmat AND sters = 1
|
||||
""", {"sku": sku, "codmat": codmat,
|
||||
"cantitate_roa": cantitate_roa, "procent_pret": procent_pret})
|
||||
""", {"sku": sku, "codmat": codmat, "cantitate_roa": cantitate_roa})
|
||||
conn.commit()
|
||||
return {"sku": sku, "codmat": codmat}
|
||||
else:
|
||||
@@ -209,13 +150,13 @@ def create_mapping(sku: str, codmat: str, cantitate_roa: float = 1, procent_pret
|
||||
)
|
||||
|
||||
cur.execute("""
|
||||
INSERT INTO ARTICOLE_TERTI (sku, codmat, cantitate_roa, procent_pret, activ, sters, data_creare, id_util_creare)
|
||||
VALUES (:sku, :codmat, :cantitate_roa, :procent_pret, 1, 0, SYSDATE, -3)
|
||||
""", {"sku": sku, "codmat": codmat, "cantitate_roa": cantitate_roa, "procent_pret": procent_pret})
|
||||
INSERT INTO ARTICOLE_TERTI (sku, codmat, cantitate_roa, activ, sters, data_creare, id_util_creare)
|
||||
VALUES (:sku, :codmat, :cantitate_roa, 1, 0, SYSDATE, -3)
|
||||
""", {"sku": sku, "codmat": codmat, "cantitate_roa": cantitate_roa})
|
||||
conn.commit()
|
||||
return {"sku": sku, "codmat": codmat}
|
||||
|
||||
def update_mapping(sku: str, codmat: str, cantitate_roa: float = None, procent_pret: float = None, activ: int = None):
|
||||
def update_mapping(sku: str, codmat: str, cantitate_roa: float = None, activ: int = None):
|
||||
"""Update an existing mapping."""
|
||||
if database.pool is None:
|
||||
raise HTTPException(status_code=503, detail="Oracle unavailable")
|
||||
@@ -226,9 +167,6 @@ def update_mapping(sku: str, codmat: str, cantitate_roa: float = None, procent_p
|
||||
if cantitate_roa is not None:
|
||||
sets.append("cantitate_roa = :cantitate_roa")
|
||||
params["cantitate_roa"] = cantitate_roa
|
||||
if procent_pret is not None:
|
||||
sets.append("procent_pret = :procent_pret")
|
||||
params["procent_pret"] = procent_pret
|
||||
if activ is not None:
|
||||
sets.append("activ = :activ")
|
||||
params["activ"] = activ
|
||||
@@ -263,7 +201,7 @@ def delete_mapping(sku: str, codmat: str):
|
||||
return cur.rowcount > 0
|
||||
|
||||
def edit_mapping(old_sku: str, old_codmat: str, new_sku: str, new_codmat: str,
|
||||
cantitate_roa: float = 1, procent_pret: float = 100):
|
||||
cantitate_roa: float = 1):
|
||||
"""Edit a mapping. If PK changed, soft-delete old and insert new."""
|
||||
if not new_sku or not new_sku.strip():
|
||||
raise HTTPException(status_code=400, detail="SKU este obligatoriu")
|
||||
@@ -273,8 +211,8 @@ def edit_mapping(old_sku: str, old_codmat: str, new_sku: str, new_codmat: str,
|
||||
raise HTTPException(status_code=503, detail="Oracle unavailable")
|
||||
|
||||
if old_sku == new_sku and old_codmat == new_codmat:
|
||||
# Simple update - only cantitate/procent changed
|
||||
return update_mapping(new_sku, new_codmat, cantitate_roa, procent_pret)
|
||||
# Simple update - only cantitate changed
|
||||
return update_mapping(new_sku, new_codmat, cantitate_roa)
|
||||
else:
|
||||
# PK changed: soft-delete old, upsert new (MERGE handles existing soft-deleted target)
|
||||
with database.pool.acquire() as conn:
|
||||
@@ -291,14 +229,12 @@ def edit_mapping(old_sku: str, old_codmat: str, new_sku: str, new_codmat: str,
|
||||
ON (t.sku = s.sku AND t.codmat = s.codmat)
|
||||
WHEN MATCHED THEN UPDATE SET
|
||||
cantitate_roa = :cantitate_roa,
|
||||
procent_pret = :procent_pret,
|
||||
activ = 1, sters = 0,
|
||||
data_modif = SYSDATE
|
||||
WHEN NOT MATCHED THEN INSERT
|
||||
(sku, codmat, cantitate_roa, procent_pret, activ, sters, data_creare, id_util_creare)
|
||||
VALUES (:sku, :codmat, :cantitate_roa, :procent_pret, 1, 0, SYSDATE, -3)
|
||||
""", {"sku": new_sku, "codmat": new_codmat,
|
||||
"cantitate_roa": cantitate_roa, "procent_pret": procent_pret})
|
||||
(sku, codmat, cantitate_roa, activ, sters, data_creare, id_util_creare)
|
||||
VALUES (:sku, :codmat, :cantitate_roa, 1, 0, SYSDATE, -3)
|
||||
""", {"sku": new_sku, "codmat": new_codmat, "cantitate_roa": cantitate_roa})
|
||||
conn.commit()
|
||||
return True
|
||||
|
||||
@@ -317,7 +253,9 @@ def restore_mapping(sku: str, codmat: str):
|
||||
return cur.rowcount > 0
|
||||
|
||||
def import_csv(file_content: str):
|
||||
"""Import mappings from CSV content. Returns summary."""
|
||||
"""Import mappings from CSV content. Returns summary.
|
||||
Backward compatible: if procent_pret column exists in CSV, it is silently ignored.
|
||||
"""
|
||||
if database.pool is None:
|
||||
raise HTTPException(status_code=503, detail="Oracle unavailable")
|
||||
|
||||
@@ -342,7 +280,7 @@ def import_csv(file_content: str):
|
||||
|
||||
try:
|
||||
cantitate = float(row.get("cantitate_roa", "1") or "1")
|
||||
procent = float(row.get("procent_pret", "100") or "100")
|
||||
# procent_pret column ignored if present (backward compat)
|
||||
|
||||
cur.execute("""
|
||||
MERGE INTO ARTICOLE_TERTI t
|
||||
@@ -350,14 +288,13 @@ def import_csv(file_content: str):
|
||||
ON (t.sku = s.sku AND t.codmat = s.codmat)
|
||||
WHEN MATCHED THEN UPDATE SET
|
||||
cantitate_roa = :cantitate_roa,
|
||||
procent_pret = :procent_pret,
|
||||
activ = 1,
|
||||
sters = 0,
|
||||
data_modif = SYSDATE
|
||||
WHEN NOT MATCHED THEN INSERT
|
||||
(sku, codmat, cantitate_roa, procent_pret, activ, sters, data_creare, id_util_creare)
|
||||
VALUES (:sku, :codmat, :cantitate_roa, :procent_pret, 1, 0, SYSDATE, -3)
|
||||
""", {"sku": sku, "codmat": codmat, "cantitate_roa": cantitate, "procent_pret": procent})
|
||||
(sku, codmat, cantitate_roa, activ, sters, data_creare, id_util_creare)
|
||||
VALUES (:sku, :codmat, :cantitate_roa, 1, 0, SYSDATE, -3)
|
||||
""", {"sku": sku, "codmat": codmat, "cantitate_roa": cantitate})
|
||||
created += 1
|
||||
|
||||
except Exception as e:
|
||||
@@ -374,12 +311,12 @@ def export_csv():
|
||||
|
||||
output = io.StringIO()
|
||||
writer = csv.writer(output)
|
||||
writer.writerow(["sku", "codmat", "cantitate_roa", "procent_pret", "activ"])
|
||||
writer.writerow(["sku", "codmat", "cantitate_roa", "activ"])
|
||||
|
||||
with database.pool.acquire() as conn:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("""
|
||||
SELECT sku, codmat, cantitate_roa, procent_pret, activ
|
||||
SELECT sku, codmat, cantitate_roa, activ
|
||||
FROM ARTICOLE_TERTI WHERE sters = 0 ORDER BY sku, codmat
|
||||
""")
|
||||
for row in cur:
|
||||
def get_csv_template():
    """Return an empty CSV template (header plus one example row)."""
    buf = io.StringIO()
    csv.writer(buf).writerows([
        ["sku", "codmat", "cantitate_roa"],
        ["EXAMPLE_SKU", "EXAMPLE_CODMAT", "1"],
    ])
    return buf.getvalue()
|
||||
|
||||
def get_component_prices(sku: str, id_pol: int, id_pol_productie: int = None) -> list:
    """Get prices from crm_politici_pret_art for kit components.

    Args:
        sku: web SKU whose active ARTICOLE_TERTI rows define the kit.
        id_pol: default sales price policy id.
        id_pol_productie: optional production policy id, used instead of
            id_pol for components booked on accounts 341/345.

    Returns:
        List of dicts with keys: codmat, denumire, cantitate_roa, pret,
        pret_cu_tva, proc_tvav, ptva, id_pol_used. Empty list when the SKU
        resolves to at most one component (i.e. it is not a kit).

    Raises:
        HTTPException: 503 when the Oracle pool is unavailable.
    """
    if database.pool is None:
        raise HTTPException(status_code=503, detail="Oracle unavailable")

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            # Get components from ARTICOLE_TERTI (active, not soft-deleted).
            cur.execute("""
                SELECT at.codmat, at.cantitate_roa, na.id_articol, na.cont, na.denumire
                FROM ARTICOLE_TERTI at
                JOIN NOM_ARTICOLE na ON na.codmat = at.codmat AND na.sters = 0 AND na.inactiv = 0
                WHERE at.sku = :sku AND at.activ = 1 AND at.sters = 0
                ORDER BY at.codmat
            """, {"sku": sku})
            components = cur.fetchall()

            if len(components) <= 1:
                return []  # Not a kit

            # Memoize PRETURI_CU_TVA per policy: at most two distinct policies
            # are ever involved, so don't re-query the flag for every component.
            preturi_cu_tva_by_pol = {}

            def _preturi_cu_tva(pol):
                # Lazily fetch and cache the policy's PRETURI_CU_TVA flag.
                if pol not in preturi_cu_tva_by_pol:
                    cur.execute("SELECT PRETURI_CU_TVA FROM CRM_POLITICI_PRETURI WHERE ID_POL = :pol", {"pol": pol})
                    pol_row = cur.fetchone()
                    preturi_cu_tva_by_pol[pol] = pol_row[0] if pol_row else 0
                return preturi_cu_tva_by_pol[pol]

            result = []
            for codmat, cant_roa, id_art, cont, denumire in components:
                # Determine policy based on account: production accounts
                # (341/345) use the production policy when it is configured.
                cont_str = str(cont or "").strip()
                pol = id_pol_productie if (cont_str in ("341", "345") and id_pol_productie) else id_pol

                preturi_cu_tva_flag = _preturi_cu_tva(pol)

                # Get the component's price under the chosen policy.
                cur.execute("""
                    SELECT PRET, PROC_TVAV FROM crm_politici_pret_art
                    WHERE id_pol = :pol AND id_articol = :id_art
                """, {"pol": pol, "id_art": id_art})
                price_row = cur.fetchone()

                if price_row:
                    pret, proc_tvav = price_row
                    proc_tvav = proc_tvav or 1.19
                    # Policies flagged PRETURI_CU_TVA already store gross prices.
                    pret_cu_tva = pret if preturi_cu_tva_flag == 1 else round(pret * proc_tvav, 2)
                    ptva = round((proc_tvav - 1) * 100)
                else:
                    # No price list entry — surface zeros with the default VAT.
                    pret = 0
                    pret_cu_tva = 0
                    proc_tvav = 1.19
                    ptva = 19

                result.append({
                    "codmat": codmat,
                    "denumire": denumire or "",
                    "cantitate_roa": float(cant_roa) if cant_roa else 1,
                    "pret": float(pret) if pret else 0,
                    "pret_cu_tva": float(pret_cu_tva),
                    "proc_tvav": float(proc_tvav),
                    "ptva": int(ptva),
                    "id_pol_used": pol
                })

            return result
|
||||
|
||||
220
api/app/services/price_sync_service.py
Normal file
220
api/app/services/price_sync_service.py
Normal file
@@ -0,0 +1,220 @@
|
||||
"""Catalog price sync service — syncs product prices from GoMag catalog to ROA Oracle."""
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from . import gomag_client, validation_service, sqlite_service
|
||||
from .. import database
|
||||
from ..config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
_tz = ZoneInfo("Europe/Bucharest")
|
||||
|
||||
_price_sync_lock = asyncio.Lock()
|
||||
_current_price_sync = None
|
||||
|
||||
|
||||
def _now():
    """Current Bucharest wall-clock time as a naive datetime."""
    local = datetime.now(_tz)
    return local.replace(tzinfo=None)
||||
|
||||
|
||||
async def prepare_price_sync() -> dict:
    """Register a new catalog price-sync run and record it in SQLite.

    Returns {"run_id": ...} on success, or {"error": ...} when a sync is
    already in progress.
    """
    global _current_price_sync

    # Refuse to start while another sync still holds the lock.
    if _price_sync_lock.locked():
        return {"error": "Price sync already running"}

    suffix = uuid.uuid4().hex[:6]
    run_id = f"{_now().strftime('%Y%m%d_%H%M%S')}_ps_{suffix}"
    _current_price_sync = {
        "run_id": run_id,
        "status": "running",
        "started_at": _now().isoformat(),
        "finished_at": None,
        "phase_text": "Starting...",
    }

    # Persist the run so history survives process restarts.
    db = await sqlite_service.get_sqlite()
    try:
        await db.execute(
            "INSERT INTO price_sync_runs (run_id, started_at, status) VALUES (?, ?, 'running')",
            (run_id, _now().isoformat()),
        )
        await db.commit()
    finally:
        await db.close()
    return {"run_id": run_id}
|
||||
|
||||
|
||||
async def get_price_sync_status() -> dict:
    """Report the in-flight run if any, otherwise the most recent run.

    Falls back to {"status": "idle"} when there is no history or the
    SQLite lookup fails.
    """
    active = _current_price_sync
    if active and active.get("status") == "running":
        return active

    # No live run: look up the last recorded one in SQLite.
    db = await sqlite_service.get_sqlite()
    try:
        cur = await db.execute(
            "SELECT * FROM price_sync_runs ORDER BY started_at DESC LIMIT 1"
        )
        last = await cur.fetchone()
        if last is None:
            return {"status": "idle"}
        return {"status": "idle", "last_run": dict(last)}
    except Exception:
        # Best-effort status endpoint: never fail, just report idle.
        return {"status": "idle"}
    finally:
        await db.close()
|
||||
|
||||
|
||||
async def run_catalog_price_sync(run_id: str):
    """Run a full catalog price sync: GoMag product prices → ROA price list.

    Downloads every GoMag product, resolves each SKU to a ROA article
    (via ARTICOLE_TERTI mapping or directly via NOM_ARTICOLE.codmat),
    and updates the article's price in the configured price policy when
    it differs. Kits (SKUs mapping to more than one CODMAT) are skipped.
    Progress and stats are persisted via _finish_run under `run_id`.
    """
    global _current_price_sync
    # Serialize runs: only one catalog sync at a time.
    async with _price_sync_lock:
        log_lines = []
        def _log(msg):
            # Log to the module logger, the persisted log buffer, and the
            # live status dict (phase_text drives the UI progress display).
            logger.info(msg)
            log_lines.append(f"[{_now().strftime('%H:%M:%S')}] {msg}")
            if _current_price_sync:
                _current_price_sync["phase_text"] = msg

        try:
            app_settings = await sqlite_service.get_app_settings()
            # "or 0) or None" collapses missing/empty/0 settings to None.
            id_pol = int(app_settings.get("id_pol") or 0) or None
            id_pol_productie = int(app_settings.get("id_pol_productie") or 0) or None

            if not id_pol:
                _log("Politica de preț nu e configurată — skip sync")
                await _finish_run(run_id, "error", log_lines, error="No price policy")
                return

            # Fetch products from GoMag
            _log("Descărcare produse din GoMag API...")
            products = await gomag_client.download_products(
                api_key=app_settings.get("gomag_api_key"),
                api_shop=app_settings.get("gomag_api_shop"),
                products_url=app_settings.get("gomag_products_url") or None,
                log_fn=_log,
            )

            if not products:
                _log("Niciun produs descărcat")
                await _finish_run(run_id, "completed", log_lines, products_total=0)
                return

            # Connect to Oracle
            conn = await asyncio.to_thread(database.get_oracle_connection)
            try:
                # Get all mappings from ARTICOLE_TERTI
                _log("Citire mapări ARTICOLE_TERTI...")
                mapped_data = await asyncio.to_thread(
                    validation_service.resolve_mapped_codmats,
                    {p["sku"] for p in products}, conn
                )

                # Get direct articles from NOM_ARTICOLE
                # (SKUs that are themselves CODMATs, not covered by a mapping).
                _log("Identificare articole directe...")
                direct_id_map = {}
                with conn.cursor() as cur:
                    all_skus = list({p["sku"] for p in products})
                    # Oracle IN-lists are capped at 1000 entries; batch by 500.
                    for i in range(0, len(all_skus), 500):
                        batch = all_skus[i:i+500]
                        placeholders = ",".join([f":s{j}" for j in range(len(batch))])
                        params = {f"s{j}": sku for j, sku in enumerate(batch)}
                        cur.execute(f"""
                            SELECT codmat, id_articol, cont FROM nom_articole
                            WHERE codmat IN ({placeholders}) AND sters = 0 AND inactiv = 0
                        """, params)
                        for row in cur:
                            # Mapped SKUs take precedence over direct matches.
                            if row[0] not in mapped_data:
                                direct_id_map[row[0]] = {"id_articol": row[1], "cont": row[2]}

                matched = 0
                updated = 0
                errors = 0

                for product in products:
                    sku = product["sku"]
                    try:
                        price_str = product.get("price", "0")
                        price = float(price_str) if price_str else 0
                        if price <= 0:
                            # Skip products without a usable web price.
                            continue

                        vat = float(product.get("vat", "19"))
                        vat_included = product.get("vat_included", "1")

                        # Calculate price with TVA
                        if vat_included == "1":
                            price_cu_tva = price
                        else:
                            price_cu_tva = price * (1 + vat / 100)

                        # Skip kits (>1 CODMAT)
                        if sku in mapped_data and len(mapped_data[sku]) > 1:
                            continue

                        # Determine id_articol and policy
                        id_articol = None
                        cantitate_roa = 1

                        if sku in mapped_data and len(mapped_data[sku]) == 1:
                            comp = mapped_data[sku][0]
                            id_articol = comp["id_articol"]
                            cantitate_roa = comp.get("cantitate_roa") or 1
                        elif sku in direct_id_map:
                            id_articol = direct_id_map[sku]["id_articol"]
                        else:
                            continue  # SKU not in ROA

                        matched += 1
                        # One web unit may map to several ROA units; price per unit.
                        price_per_unit = price_cu_tva / cantitate_roa if cantitate_roa != 1 else price_cu_tva

                        # Determine policy
                        cont = None
                        if sku in mapped_data and len(mapped_data[sku]) == 1:
                            cont = mapped_data[sku][0].get("cont")
                        elif sku in direct_id_map:
                            cont = direct_id_map[sku].get("cont")

                        # Production accounts (341/345) use the production policy.
                        cont_str = str(cont or "").strip()
                        pol = id_pol_productie if (cont_str in ("341", "345") and id_pol_productie) else id_pol

                        result = await asyncio.to_thread(
                            validation_service.compare_and_update_price,
                            id_articol, pol, price_per_unit, conn
                        )
                        if result and result["updated"]:
                            updated += 1
                            _log(f"  {result['codmat']}: {result['old_price']:.2f} → {result['new_price']:.2f}")

                    except Exception as e:
                        # Per-product failures are counted but don't abort the run.
                        errors += 1
                        _log(f"Eroare produs {sku}: {e}")

                _log(f"Sync complet: {len(products)} produse, {matched} potrivite, {updated} actualizate, {errors} erori")

            finally:
                await asyncio.to_thread(database.pool.release, conn)

            await _finish_run(run_id, "completed", log_lines,
                              products_total=len(products), matched=matched,
                              updated=updated, errors=errors)

        except Exception as e:
            _log(f"Eroare critică: {e}")
            logger.error(f"Catalog price sync error: {e}", exc_info=True)
            await _finish_run(run_id, "error", log_lines, error=str(e))
||||
|
||||
|
||||
async def _finish_run(run_id, status, log_lines, products_total=0,
                      matched=0, updated=0, errors=0, error=None):
    """Persist a run's final stats and log, then clear the in-memory state.

    NOTE(review): the `error` argument is accepted but never written to
    SQLite — confirm whether price_sync_runs should store it.
    """
    global _current_price_sync
    db = await sqlite_service.get_sqlite()
    try:
        values = (
            _now().isoformat(), status, products_total,
            matched, updated, errors,
            "\n".join(log_lines), run_id,
        )
        await db.execute("""
            UPDATE price_sync_runs SET
                finished_at = ?, status = ?, products_total = ?,
                matched = ?, updated = ?, errors = ?,
                log_text = ?
            WHERE run_id = ?
        """, values)
        await db.commit()
    finally:
        await db.close()
    # Drop the live-status dict regardless of outcome.
    _current_price_sync = None
|
||||
@@ -4,6 +4,9 @@ from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
from ..database import get_sqlite, get_sqlite_sync
|
||||
|
||||
# Re-export so other services can import get_sqlite from sqlite_service
|
||||
__all__ = ["get_sqlite", "get_sqlite_sync"]
|
||||
|
||||
_tz_bucharest = ZoneInfo("Europe/Bucharest")
|
||||
|
||||
|
||||
@@ -927,3 +930,22 @@ async def set_app_setting(key: str, value: str):
|
||||
await db.commit()
|
||||
finally:
|
||||
await db.close()
|
||||
|
||||
|
||||
# ── Price Sync Runs ───────────────────────────────
|
||||
|
||||
async def get_price_sync_runs(page: int = 1, per_page: int = 20):
    """Get paginated price sync run history, newest first."""
    db = await get_sqlite()
    try:
        skip = (page - 1) * per_page
        count_cursor = await db.execute("SELECT COUNT(*) FROM price_sync_runs")
        total = (await count_cursor.fetchone())[0]
        rows_cursor = await db.execute(
            "SELECT * FROM price_sync_runs ORDER BY started_at DESC LIMIT ? OFFSET ?",
            (per_page, skip),
        )
        history = [dict(row) for row in await rows_cursor.fetchall()]
        # Ceiling division for the total page count.
        page_count = (total + per_page - 1) // per_page
        return {"runs": history, "total": total, "page": page, "pages": page_count}
    finally:
        await db.close()
|
||||
|
||||
@@ -465,6 +465,7 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
if item.sku in validation["mapped"]:
|
||||
mapped_skus_in_orders.add(item.sku)
|
||||
|
||||
mapped_codmat_data = {}
|
||||
if mapped_skus_in_orders:
|
||||
mapped_codmat_data = await asyncio.to_thread(
|
||||
validation_service.resolve_mapped_codmats, mapped_skus_in_orders, conn
|
||||
@@ -501,6 +502,33 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
# Pass codmat_policy_map to import via app_settings
|
||||
if codmat_policy_map:
|
||||
app_settings["_codmat_policy_map"] = codmat_policy_map
|
||||
|
||||
# ── Kit component price validation ──
|
||||
kit_pricing_mode = app_settings.get("kit_pricing_mode")
|
||||
if kit_pricing_mode and mapped_codmat_data:
|
||||
id_pol_prod = int(app_settings.get("id_pol_productie") or 0) or None
|
||||
kit_missing = await asyncio.to_thread(
|
||||
validation_service.validate_kit_component_prices,
|
||||
mapped_codmat_data, id_pol, id_pol_prod, conn
|
||||
)
|
||||
if kit_missing:
|
||||
kit_skus_missing = set(kit_missing.keys())
|
||||
for sku, missing_codmats in kit_missing.items():
|
||||
_log_line(run_id, f"Kit {sku}: prețuri lipsă pentru {', '.join(missing_codmats)}")
|
||||
new_truly = []
|
||||
for order in truly_importable:
|
||||
order_skus = {item.sku for item in order.items}
|
||||
if order_skus & kit_skus_missing:
|
||||
missing_list = list(order_skus & kit_skus_missing)
|
||||
skipped.append((order, missing_list))
|
||||
else:
|
||||
new_truly.append(order)
|
||||
truly_importable = new_truly
|
||||
|
||||
# Mode B config validation
|
||||
if kit_pricing_mode == "separate_line":
|
||||
if not app_settings.get("kit_discount_codmat"):
|
||||
_log_line(run_id, "EROARE: Kit mode 'separate_line' dar kit_discount_codmat nu e configurat!")
|
||||
finally:
|
||||
await asyncio.to_thread(database.pool.release, conn)
|
||||
|
||||
@@ -565,6 +593,28 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
|
||||
})
|
||||
_log_line(run_id, f"#{order.number} [{order.date or '?'}] {customer} → OMIS (lipsa: {', '.join(missing_skus)})")
|
||||
await sqlite_service.save_orders_batch(skipped_batch)
|
||||
|
||||
# ── Price sync from orders ──
|
||||
if app_settings.get("price_sync_enabled") == "1":
|
||||
try:
|
||||
all_sync_orders = truly_importable + already_in_roa
|
||||
direct_id_map = validation.get("direct_id_map", {})
|
||||
id_pol_prod = int(app_settings.get("id_pol_productie") or 0) or None
|
||||
price_updates = await asyncio.to_thread(
|
||||
validation_service.sync_prices_from_order,
|
||||
all_sync_orders, mapped_codmat_data,
|
||||
direct_id_map, codmat_policy_map, id_pol,
|
||||
id_pol_productie=id_pol_prod,
|
||||
settings=app_settings
|
||||
)
|
||||
if price_updates:
|
||||
_log_line(run_id, f"Sync prețuri: {len(price_updates)} prețuri actualizate")
|
||||
for pu in price_updates:
|
||||
_log_line(run_id, f" {pu['codmat']}: {pu['old_price']:.2f} → {pu['new_price']:.2f}")
|
||||
except Exception as e:
|
||||
_log_line(run_id, f"Eroare sync prețuri din comenzi: {e}")
|
||||
logger.error(f"Price sync error: {e}")
|
||||
|
||||
_update_progress("skipped", f"Skipped {skipped_count}",
|
||||
0, len(truly_importable),
|
||||
{"imported": 0, "skipped": skipped_count, "errors": 0, "already_imported": already_imported_count})
|
||||
|
||||
@@ -367,7 +367,7 @@ def validate_and_ensure_prices_dual(codmats: set[str], id_pol_vanzare: int,
|
||||
def resolve_mapped_codmats(mapped_skus: set[str], conn) -> dict[str, list[dict]]:
|
||||
"""For mapped SKUs, get their underlying CODMATs from ARTICOLE_TERTI + nom_articole.
|
||||
|
||||
Returns: {sku: [{"codmat": str, "id_articol": int, "cont": str|None}]}
|
||||
Returns: {sku: [{"codmat": str, "id_articol": int, "cont": str|None, "cantitate_roa": float|None}]}
|
||||
"""
|
||||
if not mapped_skus:
|
||||
return {}
|
||||
@@ -382,7 +382,7 @@ def resolve_mapped_codmats(mapped_skus: set[str], conn) -> dict[str, list[dict]]
|
||||
params = {f"s{j}": sku for j, sku in enumerate(batch)}
|
||||
|
||||
cur.execute(f"""
|
||||
SELECT at.sku, at.codmat, na.id_articol, na.cont
|
||||
SELECT at.sku, at.codmat, na.id_articol, na.cont, at.cantitate_roa
|
||||
FROM ARTICOLE_TERTI at
|
||||
JOIN NOM_ARTICOLE na ON na.codmat = at.codmat AND na.sters = 0 AND na.inactiv = 0
|
||||
WHERE at.sku IN ({placeholders}) AND at.activ = 1 AND at.sters = 0
|
||||
@@ -394,8 +394,162 @@ def resolve_mapped_codmats(mapped_skus: set[str], conn) -> dict[str, list[dict]]
|
||||
result[sku].append({
|
||||
"codmat": row[1],
|
||||
"id_articol": row[2],
|
||||
"cont": row[3]
|
||||
"cont": row[3],
|
||||
"cantitate_roa": row[4]
|
||||
})
|
||||
|
||||
logger.info(f"resolve_mapped_codmats: {len(result)} SKUs → {sum(len(v) for v in result.values())} CODMATs")
|
||||
return result
|
||||
|
||||
|
||||
def validate_kit_component_prices(mapped_codmat_data: dict, id_pol: int,
                                  id_pol_productie: int = None, conn=None) -> dict:
    """Pre-validate that kit components have non-zero prices in crm_politici_pret_art.

    Args:
        mapped_codmat_data: {sku: [{"codmat", "id_articol", "cont"}, ...]} from resolve_mapped_codmats
        id_pol: default sales price policy
        id_pol_productie: production price policy (for cont 341/345)
        conn: optional open Oracle connection; when None, one is acquired from
            the pool and released in ``finally``.

    Returns: {sku: [missing_codmats]} for SKUs with missing prices, {} if all OK
    """
    missing = {}
    own_conn = conn is None
    if own_conn:
        conn = database.get_oracle_connection()
    try:
        with conn.cursor() as cur:
            for sku, components in mapped_codmat_data.items():
                if len(components) <= 1:
                    continue  # Not a kit — only multi-component SKUs are validated
                sku_missing = []
                for comp in components:
                    # Production articles (cont 341/345) are priced on the
                    # production policy when one is configured; everything
                    # else uses the default sales policy.
                    cont = str(comp.get("cont") or "").strip()
                    if cont in ("341", "345") and id_pol_productie:
                        pol = id_pol_productie
                    else:
                        pol = id_pol
                    cur.execute("""
                        SELECT PRET FROM crm_politici_pret_art
                        WHERE id_pol = :pol AND id_articol = :id_art
                    """, {"pol": pol, "id_art": comp["id_articol"]})
                    row = cur.fetchone()
                    # FIX: a NULL price is just as unusable as a missing row or
                    # a zero price. The previous check
                    # `row[0] is not None and row[0] == 0` let NULL PRET slip
                    # through validation because `None == 0` is False.
                    if not row or not row[0]:
                        sku_missing.append(comp["codmat"])
                if sku_missing:
                    missing[sku] = sku_missing
    finally:
        if own_conn:
            database.pool.release(conn)
    return missing
||||
|
||||
|
||||
def compare_and_update_price(id_articol: int, id_pol: int, web_price_cu_tva: float,
|
||||
conn, tolerance: float = 0.01) -> dict | None:
|
||||
"""Compare web price with ROA price and update if different.
|
||||
|
||||
Handles PRETURI_CU_TVA flag per policy.
|
||||
Returns: {"updated": bool, "old_price": float, "new_price": float, "codmat": str} or None if no price entry.
|
||||
"""
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("SELECT PRETURI_CU_TVA FROM CRM_POLITICI_PRETURI WHERE ID_POL = :pol", {"pol": id_pol})
|
||||
pol_row = cur.fetchone()
|
||||
if not pol_row:
|
||||
return None
|
||||
preturi_cu_tva = pol_row[0] # 1 or 0
|
||||
|
||||
cur.execute("""
|
||||
SELECT PRET, PROC_TVAV, na.codmat
|
||||
FROM crm_politici_pret_art pa
|
||||
JOIN nom_articole na ON na.id_articol = pa.id_articol
|
||||
WHERE pa.id_pol = :pol AND pa.id_articol = :id_art
|
||||
""", {"pol": id_pol, "id_art": id_articol})
|
||||
row = cur.fetchone()
|
||||
if not row:
|
||||
return None
|
||||
|
||||
pret_roa, proc_tvav, codmat = row[0], row[1], row[2]
|
||||
proc_tvav = proc_tvav or 1.19
|
||||
|
||||
if preturi_cu_tva == 1:
|
||||
pret_roa_cu_tva = pret_roa
|
||||
else:
|
||||
pret_roa_cu_tva = pret_roa * proc_tvav
|
||||
|
||||
if abs(pret_roa_cu_tva - web_price_cu_tva) <= tolerance:
|
||||
return {"updated": False, "old_price": pret_roa_cu_tva, "new_price": web_price_cu_tva, "codmat": codmat}
|
||||
|
||||
if preturi_cu_tva == 1:
|
||||
new_pret = web_price_cu_tva
|
||||
else:
|
||||
new_pret = round(web_price_cu_tva / proc_tvav, 4)
|
||||
|
||||
cur.execute("""
|
||||
UPDATE crm_politici_pret_art SET PRET = :pret, DATAORA = SYSDATE
|
||||
WHERE id_pol = :pol AND id_articol = :id_art
|
||||
""", {"pret": new_pret, "pol": id_pol, "id_art": id_articol})
|
||||
conn.commit()
|
||||
|
||||
return {"updated": True, "old_price": pret_roa_cu_tva, "new_price": web_price_cu_tva, "codmat": codmat}
|
||||
|
||||
|
||||
def sync_prices_from_order(orders, mapped_codmat_data: dict, direct_id_map: dict,
                           codmat_policy_map: dict, id_pol: int,
                           id_pol_productie: int = None, conn=None,
                           settings: dict = None) -> list:
    """Sync prices from order items to ROA for direct/1:1 mappings.

    Skips kit components and transport/discount CODMATs.
    Returns: list of {"codmat", "old_price", "new_price"} for updated prices.
    """
    if settings and settings.get("price_sync_enabled") != "1":
        return []

    cfg = settings or {}
    # Pseudo-articles (transport / discount lines) never get price sync.
    skip_codmats = {
        cfg.get("transport_codmat", ""),
        cfg.get("discount_codmat", ""),
        cfg.get("kit_discount_codmat", ""),
    }
    skip_codmats.discard("")

    # SKUs resolving to more than one CODMAT are kits: their web price is a
    # bundle price, not a per-component price, so they are excluded.
    kit_skus = {sku for sku, comps in mapped_codmat_data.items() if len(comps) > 1}

    updated = []
    own_conn = conn is None
    if own_conn:
        conn = database.get_oracle_connection()
    try:
        for order in orders:
            for item in order.items:
                sku = item.sku
                if not sku or sku in skip_codmats or sku in kit_skus:
                    continue

                web_price = item.price  # already with TVA
                if not web_price or web_price <= 0:
                    continue

                # Resolve the ROA article behind this SKU.
                mapping = mapped_codmat_data.get(sku)
                if mapping is not None and len(mapping) == 1:
                    # 1:1 mapping via ARTICOLE_TERTI. The web price covers
                    # cantitate_roa units, so normalise to a unit price.
                    component = mapping[0]
                    id_articol = component["id_articol"]
                    qty = component.get("cantitate_roa") or 1
                    unit_price = web_price if qty == 1 else web_price / qty
                elif sku in (direct_id_map or {}):
                    info = direct_id_map[sku]
                    id_articol = info["id_articol"] if isinstance(info, dict) else info
                    unit_price = web_price
                else:
                    continue

                pol = codmat_policy_map.get(sku, id_pol)
                result = compare_and_update_price(id_articol, pol, unit_price, conn)
                if result and result["updated"]:
                    updated.append(result)
    finally:
        if own_conn:
            database.pool.release(conn)

    return updated
||||
|
||||
Reference in New Issue
Block a user