feat(anaf-dedup): ANAF partner dedup + address fix + UI enrichment

Prevent partner duplicates via ANAF CUI verification and dual PL/SQL
search. Fix address matching with street-level comparison and diacritics
normalization. Show partner/address comparison in order detail modal.

- New anaf_service.py: batch ANAF API client with chunking, retry, cache
- PL/SQL: dual CUI search (bare/RO+bare/RO space+bare), 3-tier address
  search (street+city+id_loc → city+id_loc → create), strip_diacritics
  at storage for addresses and partner names
- SQLite: anaf_cache table, 12 new order columns for partner/address data
- import_service: cod_fiscal_override param, return partner/address from Oracle
- sync_service: ANAF batch integration, denomination mismatch detection,
  cache pre-population trigger
- Router: enriched order_detail with partner_info + addresses JSON
- UI: collapsible Detalii Partener + Adrese Comparativ sections in modal,
  auto-expand on mismatch, ANAF badges, mobile address cards
- Dashboard: address quality attention indicator
- New scan_duplicate_partners.py script for one-time duplicate audit

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Claude Agent
2026-04-01 14:36:52 +00:00
parent 3b9198d742
commit 2f593c30f6
12 changed files with 925 additions and 64 deletions

View File

@@ -179,6 +179,13 @@ CREATE TABLE IF NOT EXISTS order_items (
PRIMARY KEY (order_number, sku)
);
CREATE INDEX IF NOT EXISTS idx_order_items_order ON order_items(order_number);
CREATE TABLE IF NOT EXISTS anaf_cache (
cui TEXT PRIMARY KEY,
scp_tva INTEGER,
denumire_anaf TEXT,
checked_at TEXT NOT NULL
);
"""
_sqlite_db_path = None
@@ -333,6 +340,18 @@ def init_sqlite():
("web_status", "TEXT"),
("discount_split", "TEXT"),
("price_match", "INTEGER"),
("cod_fiscal_gomag", "TEXT"),
("cod_fiscal_roa", "TEXT"),
("denumire_roa", "TEXT"),
("anaf_platitor_tva", "INTEGER"),
("anaf_checked_at", "TEXT"),
("anaf_cod_fiscal_adjusted", "INTEGER DEFAULT 0"),
("adresa_livrare_gomag", "TEXT"),
("adresa_facturare_gomag", "TEXT"),
("adresa_livrare_roa", "TEXT"),
("adresa_facturare_roa", "TEXT"),
("anaf_denumire_mismatch", "INTEGER DEFAULT 0"),
("denumire_anaf", "TEXT"),
]:
if col not in order_cols:
conn.execute(f"ALTER TABLE orders ADD COLUMN {col} {typedef}")

View File

@@ -529,6 +529,33 @@ async def order_detail(order_number: str):
except (json.JSONDecodeError, TypeError):
pass
# Partner info
order["partner_info"] = {
"cod_fiscal_gomag": order.get("cod_fiscal_gomag"),
"cod_fiscal_roa": order.get("cod_fiscal_roa"),
"denumire_roa": order.get("denumire_roa"),
"anaf_platitor_tva": order.get("anaf_platitor_tva"),
"anaf_checked_at": order.get("anaf_checked_at"),
"anaf_cod_fiscal_adjusted": order.get("anaf_cod_fiscal_adjusted") == 1,
"anaf_denumire_mismatch": order.get("anaf_denumire_mismatch") == 1,
"denumire_anaf": order.get("denumire_anaf"),
}
# Parse JSON address strings
for key in ("adresa_livrare_gomag", "adresa_facturare_gomag",
"adresa_livrare_roa", "adresa_facturare_roa"):
val = order.get(key)
if val and isinstance(val, str):
try:
order[key] = json.loads(val)
except (json.JSONDecodeError, TypeError):
pass
order["addresses"] = {
"livrare_gomag": order.get("adresa_livrare_gomag"),
"facturare_gomag": order.get("adresa_facturare_gomag"),
"livrare_roa": order.get("adresa_livrare_roa"),
"facturare_roa": order.get("adresa_facturare_roa"),
}
# Add settings for receipt display (app_settings already fetched above)
order["transport_vat"] = app_settings.get("transport_vat") or "21"
order["transport_codmat"] = app_settings.get("transport_codmat") or ""
@@ -684,6 +711,16 @@ async def dashboard_orders(page: int = 1, per_page: int = 50,
except Exception:
counts["unresolved_skus"] = 0
# Address quality: count orders with incomplete ROA addresses
try:
addr_count = await sqlite_service.get_incomplete_addresses_count()
if addr_count == -1: # stale cache — skip
counts["incomplete_addresses"] = 0
else:
counts["incomplete_addresses"] = addr_count
except Exception:
counts["incomplete_addresses"] = 0
# For UNINVOICED filter: apply server-side filtering + pagination
if is_uninvoiced_filter:
filtered = [o for o in all_orders if o.get("status") in ("IMPORTED", "ALREADY_IMPORTED") and not o.get("invoice")]

View File

@@ -0,0 +1,142 @@
import re
import logging
import httpx
import asyncio
from datetime import datetime
logger = logging.getLogger(__name__)
# Romanian diacritics to ASCII mapping (same 14 chars as import_service)
_DIACRITICS = str.maketrans('ĂăÂâÎîȘșȚțŞşŢţ', 'AAAAIISSTTSSTT')
def strip_ro_prefix(cod_fiscal: str) -> str:
    """Normalize a CUI: trim whitespace, uppercase, drop a leading 'RO' prefix."""
    if not cod_fiscal:
        return ""
    normalized = cod_fiscal.strip().upper()
    if normalized.startswith("RO"):
        # Also drop any whitespace sitting between 'RO' and the digits.
        return normalized[2:].lstrip()
    return normalized
def validate_cui(bare_cui: str) -> bool:
    """Validate a bare (prefix-less) CUI: 1-13 ASCII digits.

    The explicit ``isascii()`` check matters: ``str.isdigit()`` alone also
    accepts non-ASCII digit characters (e.g. superscripts like '²') that
    ``int()`` cannot parse, which would crash the ANAF request builder.
    """
    if not bare_cui:
        return False
    return bare_cui.isascii() and bare_cui.isdigit() and 1 <= len(bare_cui) <= 13
async def check_vat_status_batch(cui_list: list[str], date: str = None) -> dict[str, dict]:
"""POST to ANAF API to check VAT status for a batch of CUIs.
Chunks in batches of 500 (ANAF API limit).
Returns {cui_str: {"scpTVA": bool|None, "denumire_anaf": str, "checked_at": str}, ...}
"""
if not cui_list:
return {}
check_date = date or datetime.now().strftime("%Y-%m-%d")
results = {}
for i in range(0, len(cui_list), 500):
chunk = cui_list[i:i+500]
body = [{"cui": int(cui), "data": check_date} for cui in chunk if cui.isdigit()]
if not body:
continue
chunk_results = await _call_anaf_api(body)
results.update(chunk_results)
return results
async def _call_anaf_api(body: list[dict], retry: int = 0) -> dict[str, dict]:
    """Internal: single ANAF API call with retry logic.

    Args:
        body: list of {"cui": int, "data": "YYYY-MM-DD"} request entries.
        retry: current retry depth; at most one retry per failure class.

    Returns:
        {cui_str: {"scpTVA": ..., "denumire_anaf": ..., "checked_at": ...}}
        covering both the API's "found" and "notfound" lists. On
        unrecoverable errors returns whatever was collected (usually {})
        instead of raising, so the sync flow can continue without ANAF data.
    """
    url = "https://webservicesp.anaf.ro/api/PlatitorTvaRest/v9/tva"
    results = {}
    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.post(url, json=body)
            # 429: rate limited — longer 10s backoff before the single retry.
            if response.status_code == 429:
                if retry < 1:
                    logger.warning("ANAF API rate limited (429), retrying in 10s...")
                    await asyncio.sleep(10)
                    return await _call_anaf_api(body, retry + 1)
                logger.error("ANAF API rate limited after retry")
                return {}
            # 5xx: transient server error — short 3s backoff, one retry.
            if response.status_code >= 500:
                if retry < 1:
                    logger.warning(f"ANAF API server error ({response.status_code}), retrying in 3s...")
                    await asyncio.sleep(3)
                    return await _call_anaf_api(body, retry + 1)
                logger.error(f"ANAF API server error after retry: {response.status_code}")
                return {}
            # NOTE(review): remaining 4xx responses raise here and land in the
            # generic Exception handler below, so client errors also get one
            # retry — confirm that is intended.
            response.raise_for_status()
            data = response.json()
            checked_at = datetime.now().isoformat()
            # Parse ANAF response
            found_list = data.get("found", [])
            for item in found_list:
                cui_str = str(item.get("cui", ""))
                date_generals = item.get("date_generale", {})
                results[cui_str] = {
                    "scpTVA": item.get("inregistrare_scop_Tva", {}).get("scpTVA"),
                    "denumire_anaf": date_generals.get("denumire", ""),
                    "checked_at": checked_at,
                }
            # CUIs unknown to the registry: record an explicit "unknown" entry
            # so they are cached too and not re-queried every run.
            notfound_list = data.get("notfound", [])
            for item in notfound_list:
                cui_str = str(item.get("cui", ""))
                results[cui_str] = {
                    "scpTVA": None,
                    "denumire_anaf": "",
                    "checked_at": checked_at,
                }
            logger.info(f"ANAF batch: {len(body)} CUIs → {len(found_list)} found, {len(notfound_list)} not found")
    except httpx.TimeoutException:
        if retry < 1:
            logger.warning("ANAF API timeout, retrying in 3s...")
            await asyncio.sleep(3)
            return await _call_anaf_api(body, retry + 1)
        logger.error("ANAF API timeout after retry")
    except Exception as e:
        if retry < 1:
            logger.warning(f"ANAF API error: {e}, retrying in 3s...")
            await asyncio.sleep(3)
            return await _call_anaf_api(body, retry + 1)
        logger.error(f"ANAF API error after retry: {e}")
    return results
def determine_correct_cod_fiscal(bare_cui: str, is_vat_payer: bool | None) -> str:
"""Determine the correct cod_fiscal format based on ANAF VAT status.
True → "RO" + bare, False → bare, None → bare (conservative)
"""
if is_vat_payer is True:
return "RO" + bare_cui
return bare_cui
def normalize_company_name(name: str) -> str:
    """Canonicalize a company name for equality comparison.

    Uppercases, maps Romanian diacritics to ASCII, removes legal-form
    suffixes (SRL / SA / SC / SNC / SCS, with or without dots), strips
    punctuation, and collapses runs of whitespace to single spaces.
    """
    if not name:
        return ""
    text = name.strip().upper().translate(_DIACRITICS)
    # Drop legal-form abbreviations so 'FOO SRL' and 'S.C. FOO' compare equal.
    text = re.sub(r'\b(S\.?R\.?L\.?|S\.?A\.?|S\.?C\.?|S\.?N\.?C\.?|S\.?C\.?S\.?)\b', '', text)
    text = re.sub(r'[^\w\s]', '', text)
    return re.sub(r'\s+', ' ', text).strip()

View File

@@ -201,7 +201,7 @@ def build_articles_json(items, order=None, settings=None) -> str:
return json.dumps(articles)
def import_single_order(order, id_pol: int = None, id_sectie: int = None, app_settings: dict = None, id_gestiuni: list[int] = None) -> dict:
def import_single_order(order, id_pol: int = None, id_sectie: int = None, app_settings: dict = None, id_gestiuni: list[int] = None, cod_fiscal_override: str = None) -> dict:
"""Import a single order into Oracle ROA.
Returns dict with:
@@ -239,7 +239,7 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None, app_se
if order.billing.is_company:
denumire = clean_web_text(order.billing.company_name).upper()
cod_fiscal = clean_web_text(order.billing.company_code) or None
cod_fiscal = cod_fiscal_override or clean_web_text(order.billing.company_code) or None
registru = clean_web_text(order.billing.company_reg) or None
is_pj = 1
else:
@@ -267,6 +267,12 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None, app_se
result["id_partener"] = int(partner_id)
# Query partner data from Oracle for sync back to SQLite
cur.execute("SELECT denumire, cod_fiscal FROM nom_parteneri WHERE id_part = :1", [partner_id])
row = cur.fetchone()
result["denumire_roa"] = row[0] if row else None
result["cod_fiscal_roa"] = row[1] if row else None
# Determine if billing and shipping are different persons
billing_name = clean_web_text(
f"{order.billing.lastname} {order.billing.firstname}"
@@ -350,6 +356,16 @@ def import_single_order(order, id_pol: int = None, id_sectie: int = None, app_se
if addr_livr_id is not None:
result["id_adresa_livrare"] = int(addr_livr_id)
# Query address details from Oracle for sync back to SQLite
if addr_livr_id:
cur.execute("SELECT strada, numar, localitate, judet FROM vadrese_parteneri WHERE id_adresa = :1", [int(addr_livr_id)])
row = cur.fetchone()
result["adresa_livrare_roa"] = {"strada": row[0], "numar": row[1], "localitate": row[2], "judet": row[3]} if row else None
if addr_fact_id and addr_fact_id != addr_livr_id:
cur.execute("SELECT strada, numar, localitate, judet FROM vadrese_parteneri WHERE id_adresa = :1", [int(addr_fact_id)])
row = cur.fetchone()
result["adresa_facturare_roa"] = {"strada": row[0], "numar": row[1], "localitate": row[2], "judet": row[3]} if row else None
# Step 4: Build articles JSON and import order
articles_json = build_articles_json(order.items, order, app_settings)

View File

@@ -1009,3 +1009,161 @@ async def get_price_sync_runs(page: int = 1, per_page: int = 20):
return {"runs": runs, "total": total, "page": page, "pages": (total + per_page - 1) // per_page}
finally:
await db.close()
# ── ANAF Cache ───────────────────────────────────
async def get_anaf_cache(bare_cui: str) -> dict | None:
    """Return cached ANAF data for a CUI, or None if absent or older than 7 days."""
    db = await get_sqlite()
    try:
        cursor = await db.execute("""
            SELECT scp_tva, denumire_anaf, checked_at
            FROM anaf_cache
            WHERE cui = ? AND checked_at > datetime('now', '-7 days')
        """, (bare_cui,))
        row = await cursor.fetchone()
    finally:
        await db.close()
    if row is None:
        return None
    scp = row["scp_tva"]
    # scp_tva is stored as 1/0/NULL; surface it as True/False/None.
    return {
        "scpTVA": None if scp is None else bool(scp),
        "denumire_anaf": row["denumire_anaf"] or "",
        "checked_at": row["checked_at"],
    }
async def upsert_anaf_cache(cui: str, scp_tva: int | None, denumire_anaf: str):
    """Insert a fresh ANAF cache row for *cui*, replacing any existing one."""
    query = """
        INSERT OR REPLACE INTO anaf_cache (cui, scp_tva, denumire_anaf, checked_at)
        VALUES (?, ?, ?, datetime('now'))
    """
    db = await get_sqlite()
    try:
        await db.execute(query, (cui, scp_tva, denumire_anaf))
        await db.commit()
    finally:
        await db.close()
async def bulk_populate_anaf_cache(results: dict[str, dict]):
    """Write a batch of ANAF results into the cache with one executemany call.

    *results* maps cui -> {"scpTVA": bool|None, "denumire_anaf": str,
    "checked_at": str}; scpTVA is stored as 1 / 0 / NULL.
    """
    if not results:
        return
    def _tri_state(flag):
        # Preserve tri-state exactly: True -> 1, False -> 0, anything else -> NULL.
        if flag is True:
            return 1
        if flag is False:
            return 0
        return None
    rows = [
        (cui, _tri_state(data.get("scpTVA")),
         data.get("denumire_anaf", ""),
         data.get("checked_at", _now_str()))
        for cui, data in results.items()
    ]
    db = await get_sqlite()
    try:
        await db.executemany("""
            INSERT OR REPLACE INTO anaf_cache (cui, scp_tva, denumire_anaf, checked_at)
            VALUES (?, ?, ?, ?)
        """, rows)
        await db.commit()
    finally:
        await db.close()
# ── Partner/Address Data on Orders ─────────────────
async def update_order_partner_data(order_number: str, partner_data: dict):
    """Persist partner/ANAF/address comparison fields onto an order row.

    Expected keys in *partner_data* (missing ones default to NULL, except
    the two flag columns which default to 0): cod_fiscal_gomag,
    cod_fiscal_roa, denumire_roa, anaf_platitor_tva, anaf_checked_at,
    anaf_cod_fiscal_adjusted, adresa_livrare_gomag, adresa_facturare_gomag,
    adresa_livrare_roa, adresa_facturare_roa, anaf_denumire_mismatch,
    denumire_anaf.
    """
    # (key, default) pairs — same order as the SET clause placeholders below.
    field_defaults = (
        ("cod_fiscal_gomag", None),
        ("cod_fiscal_roa", None),
        ("denumire_roa", None),
        ("anaf_platitor_tva", None),
        ("anaf_checked_at", None),
        ("anaf_cod_fiscal_adjusted", 0),
        ("adresa_livrare_gomag", None),
        ("adresa_facturare_gomag", None),
        ("adresa_livrare_roa", None),
        ("adresa_facturare_roa", None),
        ("anaf_denumire_mismatch", 0),
        ("denumire_anaf", None),
    )
    params = [partner_data.get(key, default) for key, default in field_defaults]
    params.append(order_number)
    db = await get_sqlite()
    try:
        await db.execute("""
            UPDATE orders SET
                cod_fiscal_gomag = ?,
                cod_fiscal_roa = ?,
                denumire_roa = ?,
                anaf_platitor_tva = ?,
                anaf_checked_at = ?,
                anaf_cod_fiscal_adjusted = ?,
                adresa_livrare_gomag = ?,
                adresa_facturare_gomag = ?,
                adresa_livrare_roa = ?,
                adresa_facturare_roa = ?,
                anaf_denumire_mismatch = ?,
                denumire_anaf = ?,
                updated_at = datetime('now')
            WHERE order_number = ?
        """, params)
        await db.commit()
    finally:
        await db.close()
# ── Address Quality Cache (via app_settings) ──────
async def get_incomplete_addresses_count() -> int:
    """Return the cached count of orders with incomplete ROA addresses.

    The count lives in app_settings next to a timestamp; -1 signals
    "stale" (timestamp missing, unparsable, or older than one hour) so
    callers can skip the indicator.
    """
    from datetime import datetime, timedelta
    db = await get_sqlite()
    try:
        cursor = await db.execute(
            "SELECT value FROM app_settings WHERE key = 'incomplete_addresses_checked_at'"
        )
        ts_row = await cursor.fetchone()
        stamp = ts_row["value"] if ts_row else None
        if not stamp:
            return -1
        # Freshness gate: anything older than one hour counts as stale.
        try:
            if datetime.now() - datetime.fromisoformat(stamp) > timedelta(hours=1):
                return -1
        except (ValueError, TypeError):
            return -1
        cursor = await db.execute(
            "SELECT value FROM app_settings WHERE key = 'incomplete_addresses_count'"
        )
        count_row = await cursor.fetchone()
        if count_row and count_row["value"]:
            return int(count_row["value"])
        return 0
    finally:
        await db.close()
async def set_incomplete_addresses_count(count: int):
    """Store the incomplete-addresses count plus its freshness timestamp."""
    db = await get_sqlite()
    try:
        # Keys are module-controlled constants (no injection risk); the
        # count is written first, then the timestamp, then one commit.
        for key, value in (
            ("incomplete_addresses_count", str(count)),
            ("incomplete_addresses_checked_at", _now_str()),
        ):
            await db.execute(
                f"INSERT OR REPLACE INTO app_settings (key, value) VALUES ('{key}', ?)",
                (value,),
            )
        await db.commit()
    finally:
        await db.close()

View File

@@ -12,7 +12,7 @@ def _now():
"""Return current time in Bucharest timezone (naive, for display/storage)."""
return datetime.now(_tz_bucharest).replace(tzinfo=None)
from . import order_reader, validation_service, import_service, sqlite_service, invoice_service, gomag_client
from . import order_reader, validation_service, import_service, sqlite_service, invoice_service, gomag_client, anaf_service
from ..config import settings
from .. import database
@@ -638,7 +638,51 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
0, len(truly_importable),
{"imported": 0, "skipped": skipped_count, "errors": 0, "already_imported": already_imported_count})
# Step 4: Import only truly new orders
# ANAF cache pre-population check
try:
db_check = await sqlite_service.get_sqlite()
try:
cursor = await db_check.execute("SELECT COUNT(*) FROM anaf_cache WHERE checked_at > datetime('now', '-7 days')")
row = await cursor.fetchone()
cache_count = row[0] if row else 0
finally:
await db_check.close()
if cache_count < 10:
_log_line(run_id, "ANAF pre-populare cache...")
except Exception as e:
logger.warning(f"ANAF cache pre-population check failed: {e}")
# Step 4: ANAF batch verification for company CUIs
company_cuis = set()
for order in truly_importable:
if order.billing.is_company and order.billing.company_code:
raw_cf = import_service.clean_web_text(order.billing.company_code) or ""
bare = anaf_service.strip_ro_prefix(raw_cf)
if anaf_service.validate_cui(bare):
company_cuis.add(bare)
# Check anaf_cache for already-known CUIs (7-day validity)
uncached_cuis = []
cached_results = {}
for cui in company_cuis:
cached = await sqlite_service.get_anaf_cache(cui)
if cached:
cached_results[cui] = cached
else:
uncached_cuis.append(cui)
# Batch ANAF call for uncached CUIs only
if uncached_cuis:
_log_line(run_id, f"ANAF: verificare {len(uncached_cuis)} CUI-uri noi...")
anaf_results = await anaf_service.check_vat_status_batch(uncached_cuis)
if anaf_results:
await sqlite_service.bulk_populate_anaf_cache(anaf_results)
cached_results.update(anaf_results)
else:
_log_line(run_id, "ANAF: batch call esuat, continua fara corectie CUI")
# Step 5: Import only truly new orders
imported_count = 0
error_count = 0
@@ -651,10 +695,25 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
{"imported": imported_count, "skipped": len(skipped), "errors": error_count,
"already_imported": already_imported_count})
# Determine cod_fiscal override from ANAF data
cod_fiscal_override = None
anaf_data_for_order = None
raw_cf = ""
if order.billing.is_company and order.billing.company_code:
raw_cf = import_service.clean_web_text(order.billing.company_code) or ""
bare_cui = anaf_service.strip_ro_prefix(raw_cf)
anaf_data_for_order = cached_results.get(bare_cui)
if anaf_data_for_order and anaf_data_for_order.get("scpTVA") is not None:
correct_cf = anaf_service.determine_correct_cod_fiscal(bare_cui, anaf_data_for_order["scpTVA"])
if correct_cf != raw_cf:
_log_line(run_id, f"#{order.number} CUI corectat: {raw_cf}{correct_cf}")
cod_fiscal_override = correct_cf
result = await asyncio.to_thread(
import_service.import_single_order,
order, id_pol=id_pol, id_sectie=id_sectie,
app_settings=app_settings, id_gestiuni=id_gestiuni
app_settings=app_settings, id_gestiuni=id_gestiuni,
cod_fiscal_override=cod_fiscal_override
)
# Build order items data for storage (R9)
@@ -702,7 +761,34 @@ async def run_sync(id_pol: int = None, id_sectie: int = None, run_id: str = None
)
await sqlite_service.add_order_items(order.number, order_items_data)
_log_line(run_id, f"#{order.number} [{order.date or '?'}] {customer} → IMPORTAT (ID: {result['id_comanda']})")
else:
# Save partner + ANAF + address data to SQLite
if result["success"] or result.get("id_partener"):
partner_data = {
"cod_fiscal_gomag": raw_cf if order.billing.is_company else None,
"cod_fiscal_roa": result.get("cod_fiscal_roa"),
"denumire_roa": result.get("denumire_roa"),
"anaf_platitor_tva": (1 if anaf_data_for_order.get("scpTVA") else 0) if anaf_data_for_order and anaf_data_for_order.get("scpTVA") is not None else None,
"anaf_checked_at": anaf_data_for_order.get("checked_at") if anaf_data_for_order else None,
"anaf_cod_fiscal_adjusted": 1 if cod_fiscal_override and cod_fiscal_override != raw_cf else 0,
"adresa_livrare_gomag": json.dumps({"address": order.shipping.address, "city": order.shipping.city, "region": order.shipping.region}) if order.shipping else None,
"adresa_facturare_gomag": json.dumps({"address": order.billing.address, "city": order.billing.city, "region": order.billing.region}),
"adresa_livrare_roa": json.dumps(result.get("adresa_livrare_roa")) if result.get("adresa_livrare_roa") else None,
"adresa_facturare_roa": json.dumps(result.get("adresa_facturare_roa")) if result.get("adresa_facturare_roa") else None,
"anaf_denumire_mismatch": 0,
"denumire_anaf": None,
}
# Denomination mismatch check
if anaf_data_for_order and anaf_data_for_order.get("denumire_anaf") and order.billing.is_company:
norm_gomag = anaf_service.normalize_company_name(order.billing.company_name or "")
norm_anaf = anaf_service.normalize_company_name(anaf_data_for_order["denumire_anaf"])
if norm_gomag and norm_anaf and norm_gomag != norm_anaf:
partner_data["anaf_denumire_mismatch"] = 1
partner_data["denumire_anaf"] = anaf_data_for_order["denumire_anaf"]
await sqlite_service.update_order_partner_data(order.number, partner_data)
if not result["success"]:
error_count += 1
await sqlite_service.upsert_order(
sync_run_id=run_id,

View File

@@ -1095,3 +1095,93 @@ tr.mapping-deleted td {
color: var(--info);
text-decoration: underline;
}
/* ── Partner/Address section headers (ANAF dedup) ── */
/* Clickable header of the collapsible modal sections; the chevron rotates
   via [aria-expanded] (toggled by Bootstrap collapse). */
.detail-section-header {
    font-family: var(--font-display);
    font-size: 12px;
    font-weight: 500;
    text-transform: uppercase;
    letter-spacing: 0.04em;
    color: var(--text-secondary);
    padding: 10px 0;
    border-bottom: 1px solid var(--border);
    cursor: pointer;
    display: flex;
    align-items: center;
    gap: 8px;
}
.detail-section-header:hover { color: var(--text-primary); }
.detail-section-header .bi-chevron-right {
    transition: transform 150ms ease-out;
    font-size: 10px;
}
.detail-section-header[aria-expanded="true"] .bi-chevron-right {
    transform: rotate(90deg);
}
/* Pill showing the number of alerts/differences, pushed to the right edge. */
.detail-section-header .alert-count {
    font-family: var(--font-body);
    font-size: 11px;
    font-weight: 500;
    background: var(--error-light);
    color: var(--error-text);
    padding: 2px 8px;
    border-radius: 9999px;
    margin-left: auto;
}
.detail-section-body { padding: 12px 0; }
/* Partner info: label/value pairs laid out in wrapping rows. */
.partner-row { display: flex; gap: 24px; flex-wrap: wrap; margin-bottom: 8px; }
.partner-field { min-width: 140px; }
.partner-label {
    font-family: var(--font-display);
    font-size: 12px;
    font-weight: 500;
    color: var(--text-muted);
    text-transform: uppercase;
}
.partner-value {
    font-family: var(--font-data);
    font-size: 13px;
    color: var(--text-primary);
}
/* ANAF status badges: ok = VAT payer, warn = non-payer/corrected, gray = unchecked. */
.anaf-badge {
    display: inline-block;
    font-family: var(--font-body);
    font-size: 12px;
    font-weight: 500;
    padding: 2px 8px;
    border-radius: 9999px;
}
.anaf-badge-ok { background: var(--success-light); color: var(--success-text); }
.anaf-badge-warn { background: var(--warning-light); color: var(--warning-text); }
.anaf-badge-gray { background: var(--cancelled-light); color: var(--text-muted); }
/* Desktop GoMag-vs-ROA address comparison table. */
.addr-table { width: 100%; border-collapse: collapse; font-size: 13px; }
.addr-table th {
    font-family: var(--font-display);
    font-size: 12px;
    font-weight: 500;
    text-transform: uppercase;
    color: var(--text-muted);
    padding: 6px 8px;
    text-align: left;
}
.addr-table td { padding: 8px; vertical-align: top; font-family: var(--font-body); }
/* Row highlights: yellow = GoMag/ROA differ, red = eFactura risk (missing judet/localitate). */
.addr-mismatch { background: var(--warning-light) !important; }
.addr-efactura-risk { background: var(--error-light) !important; }
.addr-label {
    font-family: var(--font-display);
    font-size: 11px;
    font-weight: 500;
    text-transform: uppercase;
    color: var(--text-secondary);
    margin-bottom: 4px;
}
/* Mobile address cards */
.addr-card { border: 1px solid var(--border); border-radius: var(--card-radius); margin-bottom: 8px; overflow: hidden; }
.addr-card-header { padding: 6px 10px; font-family: var(--font-display); font-size: 11px; font-weight: 500; text-transform: uppercase; color: var(--text-secondary); background: var(--surface-raised); }
.addr-card-row { padding: 8px 10px; }
.addr-card-row + .addr-card-row { border-top: 1px dashed var(--border-subtle); }
.addr-card-source { font-size: 11px; font-weight: 500; color: var(--text-muted); margin-bottom: 2px; }
.addr-card-text { font-family: var(--font-body); font-size: 13px; }
.addr-card.mismatch { border-left: 3px solid var(--warning); }
.addr-card.match .addr-match-label { font-size: 11px; color: var(--success-text); }

View File

@@ -337,13 +337,16 @@ async function loadDashOrders() {
const unmapped = c.unresolved_skus || 0;
const nefact = c.nefacturate || 0;
if (errors === 0 && unmapped === 0 && nefact === 0) {
const incompleteAddr = c.incomplete_addresses || 0;
if (errors === 0 && unmapped === 0 && nefact === 0 && incompleteAddr === 0) {
attnEl.innerHTML = '<div class="attention-card attention-ok"><i class="bi bi-check-circle"></i> Totul in ordine</div>';
} else {
let items = [];
if (errors > 0) items.push(`<span class="attention-item attention-error" onclick="document.querySelector('.filter-pill[data-status=ERROR]')?.click()"><i class="bi bi-exclamation-triangle"></i> ${errors} erori import</span>`);
if (unmapped > 0) items.push(`<span class="attention-item attention-warning" onclick="window.location='${window.ROOT_PATH||''}/missing-skus'"><i class="bi bi-puzzle"></i> ${unmapped} SKU-uri nemapate</span>`);
if (nefact > 0) items.push(`<span class="attention-item attention-warning" onclick="document.querySelector('.filter-pill[data-status=UNINVOICED]')?.click()"><i class="bi bi-receipt"></i> ${nefact} nefacturate</span>`);
if (c.incomplete_addresses > 0) items.push(`<span class="attention-item attention-warning"><i class="bi bi-geo-alt"></i> ${c.incomplete_addresses} adrese incomplete</span>`);
attnEl.innerHTML = '<div class="attention-card attention-alert">' + items.join('') + '</div>';
}
}

View File

@@ -519,6 +519,15 @@ async function renderOrderDetailModal(orderNumber, opts) {
if (priceCheckEl) priceCheckEl.innerHTML = '';
const reconEl = document.getElementById('detailInvoiceRecon');
if (reconEl) { reconEl.innerHTML = ''; reconEl.style.display = 'none'; }
// Reset partner/address sections
const partnerSection = document.getElementById('detailPartnerSection');
if (partnerSection) partnerSection.style.display = 'none';
const addressSection = document.getElementById('detailAddressSection');
if (addressSection) addressSection.style.display = 'none';
const partnerBody = document.getElementById('partnerInfoBody');
if (partnerBody) partnerBody.innerHTML = '';
const addressBody = document.getElementById('addressInfoBody');
if (addressBody) addressBody.innerHTML = '';
const modalEl = document.getElementById('orderDetailModal');
const existing = bootstrap.Modal.getInstance(modalEl);
@@ -583,6 +592,10 @@ async function renderOrderDetailModal(orderNumber, opts) {
reconEl.style.display = 'none';
}
// Render partner + address sections
_renderPartnerSection(order);
_renderAddressSection(order);
if (order.error_message) {
document.getElementById('detailError').textContent = order.error_message;
document.getElementById('detailError').style.display = '';
@@ -817,3 +830,198 @@ function statusDot(status) {
return '<span class="dot dot-gray"></span>';
}
}
// ── Partner & Address Section Rendering ──────────

/**
 * Fill the collapsible "Detalii Partener" section of the order modal.
 * The section stays hidden unless partner_info with a GoMag CUI is
 * present (i.e. company/PJ orders only). Counts alerts (CUI corrected
 * via ANAF, denomination mismatch), renders the VAT-status badge, and
 * auto-expands the section when there is at least one alert.
 */
function _renderPartnerSection(order) {
    const section = document.getElementById('detailPartnerSection');
    const body = document.getElementById('partnerInfoBody');
    const alertEl = document.getElementById('partnerAlertCount');
    if (!section || !body) return;
    const pi = order.partner_info;
    // No CUI from GoMag → not a company order; keep the section hidden.
    if (!pi || !pi.cod_fiscal_gomag) {
        section.style.display = 'none';
        return;
    }
    section.style.display = '';
    // Header alert pill: one alert per detected anomaly.
    let alertCount = 0;
    if (pi.anaf_cod_fiscal_adjusted) alertCount++;
    if (pi.anaf_denumire_mismatch) alertCount++;
    if (alertEl) {
        if (alertCount > 0) {
            alertEl.textContent = alertCount + (alertCount === 1 ? ' alerta' : ' alerte');
            alertEl.style.display = '';
        } else {
            alertEl.style.display = 'none';
        }
    }
    // ANAF badge: 1 = VAT payer, 0 = non-payer, anything else = unchecked.
    let anafBadge;
    if (pi.anaf_platitor_tva === 1) {
        anafBadge = '<span class="anaf-badge anaf-badge-ok">Platitor TVA</span>';
    } else if (pi.anaf_platitor_tva === 0) {
        anafBadge = '<span class="anaf-badge anaf-badge-warn">Neplatitor TVA</span>';
    } else {
        anafBadge = '<span class="anaf-badge anaf-badge-gray">Neverificat</span>';
    }
    // CUI correction badge (set when sync adjusted the RO prefix per ANAF).
    let cuiCorrBadge = '';
    if (pi.anaf_cod_fiscal_adjusted) {
        cuiCorrBadge = ' <span class="anaf-badge anaf-badge-warn"><i class="bi bi-arrow-left-right"></i> Corectat ANAF</span>';
    }
    // Denomination mismatch: show GoMag vs ANAF names side by side.
    let denomHtml = '';
    if (pi.anaf_denumire_mismatch && pi.denumire_anaf) {
        denomHtml = `<div style="background:var(--warning-light);padding:8px 12px;border-radius:var(--card-radius);margin-top:8px">
            <span class="partner-label" style="color:var(--warning-text)"><i class="bi bi-exclamation-triangle"></i> Denumire diferita</span><br>
            <span style="font-size:13px">GoMag: <strong>${esc(order.customer_name || '')}</strong></span><br>
            <span style="font-size:13px">ANAF: <strong>${esc(pi.denumire_anaf)}</strong></span>
        </div>`;
    }
    body.innerHTML = `
        <div class="partner-row">
            <div class="partner-field">
                <div class="partner-label">CUI GoMag</div>
                <div class="partner-value">${esc(pi.cod_fiscal_gomag)}</div>
            </div>
            <div class="partner-field">
                <div class="partner-label">CUI ROA</div>
                <div class="partner-value">${esc(pi.cod_fiscal_roa || '-')}${cuiCorrBadge}</div>
            </div>
            <div class="partner-field">
                <div class="partner-label">Partener ROA</div>
                <div style="font-family:var(--font-body);font-size:14px;font-weight:500">${esc(pi.denumire_roa || '-')}</div>
            </div>
        </div>
        <div class="partner-row">
            <div class="partner-field">
                <div class="partner-label">ANAF</div>
                <div>${anafBadge}</div>
            </div>
        </div>
        ${denomHtml}`;
    // Auto-expand on mismatch
    if (alertCount > 0) {
        const collapseEl = document.getElementById('detailPartnerInfo');
        if (collapseEl && !collapseEl.classList.contains('show')) {
            new bootstrap.Collapse(collapseEl, { show: true });
        }
    }
}
function _renderAddressSection(order) {
const section = document.getElementById('detailAddressSection');
const body = document.getElementById('addressInfoBody');
const alertEl = document.getElementById('addressAlertCount');
if (!section || !body) return;
const addr = order.addresses;
if (!addr || (!addr.livrare_gomag && !addr.facturare_gomag)) {
section.style.display = 'none';
return;
}
section.style.display = '';
let mismatchCount = 0;
function fmtAddr(a) {
if (!a) return '-';
if (typeof a === 'string') return esc(a);
const parts = [a.address || a.strada || '', a.numar || ''].filter(Boolean);
const line1 = parts.join(' ').trim();
const line2 = [a.city || a.localitate || '', a.region || a.judet || ''].filter(Boolean).join(', ');
return esc(line1) + (line2 ? '<br>' + esc(line2) : '');
}
function addrMatch(gomag, roa) {
if (!gomag || !roa) return true; // can't compare
const g = JSON.stringify(gomag).toUpperCase().replace(/[^A-Z0-9]/g, '');
const r = JSON.stringify(roa).toUpperCase().replace(/[^A-Z0-9]/g, '');
return g === r;
}
function hasEfacturaRisk(roa) {
if (!roa || typeof roa === 'string') return false;
return !roa.judet || !roa.localitate;
}
const livrMatch = addrMatch(addr.livrare_gomag, addr.livrare_roa);
const factMatch = addrMatch(addr.facturare_gomag, addr.facturare_roa);
if (!livrMatch) mismatchCount++;
if (!factMatch) mismatchCount++;
const livrRisk = hasEfacturaRisk(addr.livrare_roa);
const factRisk = hasEfacturaRisk(addr.facturare_roa);
if (alertEl) {
if (mismatchCount > 0) {
alertEl.textContent = mismatchCount + (mismatchCount === 1 ? ' diferenta' : ' diferente');
alertEl.style.display = '';
} else {
alertEl.style.display = 'none';
}
}
// Desktop: 2-column table
const livrClass = livrRisk ? 'addr-efactura-risk' : (!livrMatch ? 'addr-mismatch' : '');
const factClass = factRisk ? 'addr-efactura-risk' : (!factMatch ? 'addr-mismatch' : '');
const desktopHtml = `
<table class="addr-table d-none d-md-table">
<thead><tr><th></th><th>GOMAG</th><th>ROA</th></tr></thead>
<tbody>
<tr class="${livrClass}">
<td><span class="addr-label">LIVRARE</span>${livrRisk ? '<br><small style="color:var(--error-text)">⚠ Risc eFactura</small>' : ''}</td>
<td>${fmtAddr(addr.livrare_gomag)}</td>
<td>${fmtAddr(addr.livrare_roa)}</td>
</tr>
<tr class="${factClass}">
<td><span class="addr-label">FACTURARE</span>${factRisk ? '<br><small style="color:var(--error-text)">⚠ Risc eFactura</small>' : ''}</td>
<td>${fmtAddr(addr.facturare_gomag)}</td>
<td>${fmtAddr(addr.facturare_roa)}</td>
</tr>
</tbody>
</table>`;
// Mobile: stacked cards
// Render one stacked mobile comparison card (GoMag vs ROA) for a single
// address kind. `label` is the card header text ("LIVRARE"/"FACTURARE").
// The eFactura-risk class takes priority over mismatch/match styling.
function mobileCard(label, gomag, roa, isMatch, isRisk) {
    // Risk wins; otherwise flag mismatch, else mark as a clean match.
    const cls = isRisk ? ' addr-efactura-risk' : (!isMatch ? ' mismatch' : ' match');
    // Footer labels: "identical addresses" tick only when matching,
    // eFactura warning only when judet/localitate is missing on the ROA side.
    const matchLabel = isMatch ? '<div class="addr-match-label">✓ Adrese identice</div>' : '';
    const riskLabel = isRisk ? '<div style="font-size:11px;color:var(--error-text)">⚠ Risc eFactura</div>' : '';
    return `<div class="addr-card${cls}">
<div class="addr-card-header">${label}</div>
<div class="addr-card-row">
<div class="addr-card-source">GoMag:</div>
<div class="addr-card-text">${fmtAddr(gomag)}</div>
</div>
<div class="addr-card-row">
<div class="addr-card-source">ROA:</div>
<div class="addr-card-text">${fmtAddr(roa)}</div>
</div>
${matchLabel}${riskLabel}
</div>`;
}
const mobileHtml = `<div class="d-md-none">
${mobileCard('LIVRARE', addr.livrare_gomag, addr.livrare_roa, livrMatch, livrRisk)}
${mobileCard('FACTURARE', addr.facturare_gomag, addr.facturare_roa, factMatch, factRisk)}
</div>`;
body.innerHTML = desktopHtml + mobileHtml;
// Auto-expand on mismatch
if (mismatchCount > 0) {
const collapseEl = document.getElementById('detailAddressInfo');
if (collapseEl && !collapseEl.classList.contains('show')) {
new bootstrap.Collapse(collapseEl, { show: true });
}
}
}

View File

@@ -19,7 +19,7 @@
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet">
<link href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.2/font/bootstrap-icons.css" rel="stylesheet">
{% set rp = request.scope.get('root_path', '') %}
<link href="{{ rp }}/static/css/style.css?v=25" rel="stylesheet">
<link href="{{ rp }}/static/css/style.css?v=26" rel="stylesheet">
</head>
<body>
<!-- Top Navbar (hidden on mobile via CSS) -->
@@ -110,6 +110,32 @@
</div>
</div>
</div>
<!-- Partner Info Section (PJ only) -->
<div id="detailPartnerSection" style="display:none" class="mb-3">
<div class="detail-section-header" role="button" tabindex="0"
data-bs-toggle="collapse" data-bs-target="#detailPartnerInfo"
aria-expanded="false" aria-controls="detailPartnerInfo">
<i class="bi bi-chevron-right"></i>
<span>DETALII PARTENER</span>
<span id="partnerAlertCount" class="alert-count" style="display:none"></span>
</div>
<div class="collapse" id="detailPartnerInfo">
<div class="detail-section-body" id="partnerInfoBody"></div>
</div>
</div>
<!-- Address Comparison Section -->
<div id="detailAddressSection" style="display:none" class="mb-3">
<div class="detail-section-header" role="button" tabindex="0"
data-bs-toggle="collapse" data-bs-target="#detailAddressInfo"
aria-expanded="false" aria-controls="detailAddressInfo">
<i class="bi bi-chevron-right"></i>
<span>ADRESE COMPARATIV</span>
<span id="addressAlertCount" class="alert-count" style="display:none"></span>
</div>
<div class="collapse" id="detailAddressInfo">
<div class="detail-section-body" id="addressInfoBody"></div>
</div>
</div>
<div class="table-responsive d-none d-md-block">
<table class="table table-sm table-bordered mb-0">
<thead class="table-light">
@@ -144,7 +170,7 @@
<script>window.ROOT_PATH = "{{ rp }}";</script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
<script src="{{ rp }}/static/js/shared.js?v=20"></script>
<script src="{{ rp }}/static/js/shared.js?v=21"></script>
<script>
// Dark mode toggle
function toggleDarkMode() {

View File

@@ -2,6 +2,7 @@ CREATE OR REPLACE PACKAGE PACK_IMPORT_PARTENERI AS
-- 20.03.2026 - import parteneri GoMag: PJ/PF, shipping/billing, cautare/creare automata
-- 31.03.2026 - parser inteligent adrese: split numar in bloc/scara/apart/etaj (fix ORA-12899 pe NUMAR max 10 chars)
-- 01.04.2026 - ANAF dedup: cautare duala CUI, adrese pe strada+diacritics, strip diacritics la stocare
-- ====================================================================
-- CONSTANTS
@@ -146,10 +147,25 @@ CREATE OR REPLACE PACKAGE PACK_IMPORT_PARTENERI AS
*/
PROCEDURE clear_error;
FUNCTION strip_diacritics(p_text IN VARCHAR2) RETURN VARCHAR2;
END PACK_IMPORT_PARTENERI;
/
CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_PARTENERI AS
-- 01.04.2026 - strip_diacritics la stocare adrese si parteneri
FUNCTION strip_diacritics(p_text IN VARCHAR2) RETURN VARCHAR2 IS
    -- Uppercased, trimmed working copy of the input.
    v_norm VARCHAR2(32767);
BEGIN
    -- NULL in, NULL out: nothing to normalize.
    IF p_text IS NULL THEN
        RETURN NULL;
    END IF;
    v_norm := UPPER(TRIM(p_text));
    -- Single-pass mapping of Romanian diacritics (both comma-below and
    -- legacy cedilla variants) onto their plain ASCII letters.
    RETURN TRANSLATE(v_norm, 'ĂăÂâÎîȘșȚțŞşŢţ', 'AAAAIISSTTSSTT');
END strip_diacritics;
-- ================================================================
-- ERROR MANAGEMENT FUNCTIONS IMPLEMENTATION
-- ================================================================
@@ -212,57 +228,52 @@ CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_PARTENERI AS
-- PUBLIC FUNCTIONS IMPLEMENTATION
-- ====================================================================
-- 01.04.2026 - cautare duala cod_fiscal cu/fara prefix RO (anti-duplicare parteneri)
FUNCTION cauta_partener_dupa_cod_fiscal(p_cod_fiscal IN VARCHAR2)
RETURN NUMBER IS
v_id_part NUMBER;
v_cod_fiscal_curat VARCHAR2(50);
v_bare_cui VARCHAR2(50);
v_ro_cui VARCHAR2(52);
BEGIN
-- Validare input
IF p_cod_fiscal IS NULL OR
LENGTH(TRIM(p_cod_fiscal)) < C_MIN_COD_FISCAL THEN
IF p_cod_fiscal IS NULL OR LENGTH(TRIM(p_cod_fiscal)) < C_MIN_COD_FISCAL THEN
RETURN NULL;
END IF;
v_cod_fiscal_curat := curata_text_cautare(p_cod_fiscal);
-- pINFO('Cautare partener dupa cod_fiscal: ' || v_cod_fiscal_curat, 'IMPORT_PARTENERI');
-- Cautare in NOM_PARTENERI
v_cod_fiscal_curat := UPPER(TRIM(p_cod_fiscal));
-- Extract bare CUI (without RO prefix)
IF REGEXP_LIKE(v_cod_fiscal_curat, '^RO\s*\d') THEN
v_bare_cui := TRIM(REGEXP_REPLACE(v_cod_fiscal_curat, '^RO\s*', ''));
ELSE
v_bare_cui := v_cod_fiscal_curat;
END IF;
v_ro_cui := 'RO' || v_bare_cui;
-- 01.04.2026 - cautare duala cod_fiscal cu/fara prefix RO (anti-duplicare parteneri)
-- Search 3 forms: bare, RO+bare, RO+space+bare (index-friendly)
-- Priority: active + exact form > active + alternate > inactive
BEGIN
SELECT id_part
INTO v_id_part
SELECT id_part INTO v_id_part FROM (
SELECT id_part
FROM nom_parteneri
WHERE UPPER(TRIM(cod_fiscal)) = v_cod_fiscal_curat
AND ROWNUM = 1; -- In caz de duplicate, luam primul
-- pINFO('Gasit partener cu cod_fiscal ' || v_cod_fiscal_curat || ': ID_PART=' || v_id_part, 'IMPORT_PARTENERI');
WHERE UPPER(TRIM(cod_fiscal)) IN (v_bare_cui, v_ro_cui, 'RO ' || v_bare_cui)
AND NVL(sters, 0) = 0
ORDER BY NVL(inactiv, 0) ASC,
CASE WHEN UPPER(TRIM(cod_fiscal)) = v_cod_fiscal_curat THEN 0 ELSE 1 END ASC,
id_part DESC
) WHERE ROWNUM = 1;
RETURN v_id_part;
EXCEPTION
WHEN NO_DATA_FOUND THEN
-- pINFO('Nu s-a gasit partener cu cod_fiscal: ' || v_cod_fiscal_curat, 'IMPORT_PARTENERI');
RETURN NULL;
WHEN TOO_MANY_ROWS THEN
-- Luam primul gasit
SELECT id_part
INTO v_id_part
FROM (SELECT id_part
FROM nom_parteneri
WHERE UPPER(TRIM(cod_fiscal)) = v_cod_fiscal_curat
ORDER BY id_part)
WHERE ROWNUM = 1;
pINFO('WARNING: Multiple parteneri cu acelasi cod_fiscal ' ||
v_cod_fiscal_curat || '. Selectat ID_PART=' || v_id_part,
'IMPORT_PARTENERI');
RETURN v_id_part;
END;
EXCEPTION
WHEN OTHERS THEN
pINFO('ERROR in cauta_partener_dupa_cod_fiscal: ' || SQLERRM,
'IMPORT_PARTENERI');
pINFO('ERROR in cauta_partener_dupa_cod_fiscal: ' || SQLERRM, 'IMPORT_PARTENERI');
RAISE;
END cauta_partener_dupa_cod_fiscal;
@@ -677,6 +688,9 @@ CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_PARTENERI AS
-- pINFO('Nume separat: NUME=' || NVL(v_nume, 'NULL') || ', PRENUME=' || NVL(v_prenume, 'NULL'), 'IMPORT_PARTENERI');
END IF;
-- Strip diacritics from partner name before storage
v_denumire_curata := strip_diacritics(v_denumire_curata);
-- Creare partener prin pack_def
BEGIN
IF v_este_persoana_fizica = 1 THEN
@@ -797,30 +811,37 @@ CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_PARTENERI AS
v_apart,
v_etaj);
-- caut prima adresa dupa judet si localitate, ordonate dupa principala = 1
-- 01.04.2026 - cautare adresa pe strada + diacritics + id_loc validation
-- TIER 1: county + city + street (diacritics normalized) + valid id_loc
begin
select max(id_adresa) over(order by principala desc)
into p_id_adresa
from vadrese_parteneri
where id_part = p_id_part
and judet = v_judet
and localitate = v_localitate;
exception
WHEN NO_DATA_FOUND THEN
p_id_adresa := null;
end;
-- caut prima adresa dupa judet, ordonate dupa principala = 1
if p_id_adresa is null then
begin
select max(id_adresa) over(order by principala desc)
into p_id_adresa
select id_adresa into p_id_adresa from (
select id_adresa
from vadrese_parteneri
where id_part = p_id_part
and judet = v_judet;
and judet = v_judet
and localitate = v_localitate
and strip_diacritics(strada) = strip_diacritics(v_strada)
and id_loc IS NOT NULL
order by principala desc, id_adresa desc
) where rownum = 1;
exception
when NO_DATA_FOUND then p_id_adresa := null;
end;
-- TIER 2: county + city (no street) but ONLY with valid id_loc
if p_id_adresa is null then
begin
select id_adresa into p_id_adresa from (
select id_adresa
from vadrese_parteneri
where id_part = p_id_part
and judet = v_judet
and localitate = v_localitate
and id_loc IS NOT NULL
order by principala desc, id_adresa desc
) where rownum = 1;
exception
WHEN NO_DATA_FOUND THEN
p_id_adresa := null;
when NO_DATA_FOUND then p_id_adresa := null;
end;
end if;
@@ -870,6 +891,12 @@ CREATE OR REPLACE PACKAGE BODY PACK_IMPORT_PARTENERI AS
end;
end;
-- 01.04.2026 - strip_diacritics la stocare adrese
v_strada := strip_diacritics(v_strada);
v_localitate := strip_diacritics(v_localitate);
v_numar := strip_diacritics(v_numar);
v_bloc := strip_diacritics(v_bloc);
BEGIN
pack_def.adauga_adresa_partener2(tnId_part => p_id_part,
tcDenumire_adresa => NULL,

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env python3
"""One-time script to find duplicate partners by CUI (bare number, ignoring RO prefix)."""
import sys, os, csv
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
# Setup Oracle env same as start.sh
from api.app import database
def scan_duplicates():
    """Audit NOM_PARTENERI for partners sharing the same bare CUI.

    A leading ``RO`` prefix (with optional whitespace) is stripped before
    grouping, so e.g. ``RO123456`` and ``123456`` count as the same fiscal
    code. Deleted partners (``sters=1``) and too-short codes are ignored.

    Output: a markdown table on stdout (partner list truncated to 100 chars
    per row) plus the full, untruncated result set written to
    ``duplicate_partners.csv`` next to this script.
    """
    database.init_oracle()
    conn = database.get_oracle_connection()
    try:
        with conn.cursor() as cur:
            cur.execute("""
                SELECT bare_cui, COUNT(*) as cnt,
                       LISTAGG(id_part||':'||denumire, ', ') WITHIN GROUP (ORDER BY id_part) as partners
                FROM (SELECT id_part, denumire,
                             TRIM(REGEXP_REPLACE(UPPER(TRIM(cod_fiscal)), '^RO\\s*', '')) as bare_cui
                      FROM nom_parteneri WHERE NVL(sters,0)=0
                        AND cod_fiscal IS NOT NULL AND LENGTH(TRIM(cod_fiscal)) >= 3)
                GROUP BY bare_cui HAVING COUNT(*) > 1
                ORDER BY cnt DESC
            """)
            rows = cur.fetchall()
    finally:
        # Release the connection AND tear the pool down even when the query
        # raises; previously close_oracle() ran only on the success path,
        # leaking the pool on error.
        database.pool.release(conn)
        database.close_oracle()
    # Markdown report to stdout.
    print("\n## Duplicate Partners Report\n")
    print(f"Found {len(rows)} CUIs with duplicate partners.\n")
    print("| CUI | Count | Partners |")
    print("|-----|-------|----------|")
    for cui, cnt, partners in rows:
        # Escape pipes so partner names cannot break the markdown table.
        safe_partners = (partners or '').replace('|', '\\|')
        print(f"| {cui} | {cnt} | {safe_partners[:100]} |")
    # CSV keeps the untruncated partner list; explicit utf-8 avoids
    # UnicodeEncodeError on Romanian diacritics under non-UTF-8 locales.
    csv_path = os.path.join(os.path.dirname(__file__), 'duplicate_partners.csv')
    with open(csv_path, 'w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(['bare_cui', 'count', 'partners'])
        writer.writerows(rows)
    print(f"\nCSV saved: {csv_path}")
if __name__ == '__main__':
scan_duplicates()