feat: add FastAPI admin dashboard with sync orchestration and test suite
Replace Flask admin with FastAPI app (api/app/) featuring: - Dashboard with stat cards, sync control, and history - Mappings CRUD for ARTICOLE_TERTI with CSV import/export - Article autocomplete from NOM_ARTICOLE - SKU pre-validation before import - Sync orchestration: read JSONs -> validate -> import -> log to SQLite - APScheduler for periodic sync from UI - File logging to logs/sync_comenzi_YYYYMMDD_HHMMSS.log - Oracle pool None guard (503 vs 500 on unavailable) Test suite: - test_app_basic.py: 30 tests (imports + routes) without Oracle - test_integration.py: 9 integration tests with Oracle Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
0
api/app/services/__init__.py
Normal file
0
api/app/services/__init__.py
Normal file
27
api/app/services/article_service.py
Normal file
27
api/app/services/article_service.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import logging
|
||||
from fastapi import HTTPException
|
||||
from .. import database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def search_articles(query: str, limit: int = 20):
    """Search articles in NOM_ARTICOLE by codmat or denumire.

    codmat is matched by prefix and denumire by substring, both
    case-insensitive; prefix hits on codmat rank first. At most `limit`
    rows are returned as dicts keyed by lower-cased column names.
    Queries shorter than 2 characters return an empty list.

    Raises:
        HTTPException: 503 when the Oracle pool is unavailable.
    """
    if database.pool is None:
        raise HTTPException(status_code=503, detail="Oracle unavailable")

    if not query or len(query) < 2:
        return []

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            # Use FETCH FIRST, not "ROWNUM <= :lim" in the WHERE clause:
            # ROWNUM is assigned before ORDER BY runs, so the old form
            # truncated an unsorted set and could drop the best matches.
            cur.execute("""
                SELECT id_articol, codmat, denumire
                FROM nom_articole
                WHERE UPPER(codmat) LIKE UPPER(:q) || '%'
                   OR UPPER(denumire) LIKE '%' || UPPER(:q) || '%'
                ORDER BY CASE WHEN UPPER(codmat) LIKE UPPER(:q) || '%' THEN 0 ELSE 1 END, codmat
                FETCH FIRST :lim ROWS ONLY
            """, {"q": query, "lim": limit})

            columns = [col[0].lower() for col in cur.description]
            return [dict(zip(columns, row)) for row in cur.fetchall()]
|
||||
192
api/app/services/import_service.py
Normal file
192
api/app/services/import_service.py
Normal file
@@ -0,0 +1,192 @@
|
||||
import html
|
||||
import json
|
||||
import logging
|
||||
import oracledb
|
||||
from datetime import datetime, timedelta
|
||||
from .. import database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Translation table: Romanian diacritics (modern comma-below letters and
# the older cedilla variants) mapped to their plain-ASCII counterparts.
_DIACRITICS = str.maketrans(
    "\u0103\u00e2\u00ee\u0219\u021b"      # ă â î ș ț
    "\u0102\u00c2\u00ce\u0218\u021a"      # Ă Â Î Ș Ț
    "\u015f\u0163\u015e\u0162",           # ş ţ Ş Ţ (legacy cedilla forms)
    "aaist"
    "AAIST"
    "stST",
)


def clean_web_text(text: str) -> str:
    """Port of VFP CleanWebText: unescape HTML entities + diacritics to ASCII."""
    if not text:
        return ""
    cleaned = html.unescape(text).translate(_DIACRITICS)
    # Strip any literal <br> variants that survived the unescape step.
    for tag in ('<br>', '<br/>', '<br />'):
        cleaned = cleaned.replace(tag, ' ')
    return cleaned.strip()
|
||||
|
||||
|
||||
def convert_web_date(date_str: str) -> datetime:
    """Port of VFP ConvertWebDate: parse web date to datetime.

    Only the leading YYYY-MM-DD portion is parsed; empty or unparseable
    input falls back to the current timestamp.
    """
    if date_str:
        try:
            return datetime.strptime(date_str[:10], '%Y-%m-%d')
        except ValueError:
            pass
    return datetime.now()
|
||||
|
||||
|
||||
def format_address_for_oracle(address: str, city: str, region: str) -> str:
    """Port of VFP FormatAddressForOracle.

    Produces the 'JUD:<region>;<city>;<address>' string expected by the
    Oracle address procedures, with each component cleaned first.
    """
    cleaned = [clean_web_text(part) for part in (region, city, address)]
    return "JUD:{};{};{}".format(*cleaned)
|
||||
|
||||
|
||||
def build_articles_json(items) -> str:
    """Build JSON string for Oracle PACK_IMPORT_COMENZI.importa_comanda.

    Numeric fields are serialized as strings (the PL/SQL side parses
    them); names are cleaned of HTML entities and diacritics.
    """
    return json.dumps([
        {
            "sku": entry.sku,
            "quantity": str(entry.quantity),
            "price": str(entry.price),
            "vat": str(entry.vat),
            "name": clean_web_text(entry.name)
        }
        for entry in items
    ])
|
||||
|
||||
|
||||
def import_single_order(order, id_pol: int = None, id_sectie: int = None) -> dict:
    """Import a single order into Oracle ROA.

    Pipeline (each step is an Oracle stored-procedure call):
      1. find-or-create the partner (company or private person),
      2. find-or-create the billing address,
      3. find-or-create the shipping address, when the order has one,
      4. PACK_IMPORT_COMENZI.importa_comanda with the lines as a JSON CLOB.

    Commits only when importa_comanda returns a valid id; rolls back
    otherwise. Never raises — failures are reported via the result dict.

    Returns dict with:
        success: bool
        id_comanda: int or None
        id_partener: int or None
        error: str or None
    """
    result = {
        "success": False,
        "id_comanda": None,
        "id_partener": None,
        "error": None
    }

    # Fail fast with a clear message when the Oracle pool is down,
    # instead of surfacing an AttributeError from pool.acquire()
    # (mirrors the 503 guard used by the other services).
    if database.pool is None:
        result["error"] = "Oracle unavailable"
        return result

    try:
        order_number = clean_web_text(order.number)
        order_date = convert_web_date(order.date)

        with database.pool.acquire() as conn:
            with conn.cursor() as cur:
                # Step 1: Process partner
                id_partener = cur.var(oracledb.DB_TYPE_NUMBER)

                if order.billing.is_company:
                    denumire = clean_web_text(order.billing.company_name)
                    cod_fiscal = clean_web_text(order.billing.company_code) or None
                    registru = clean_web_text(order.billing.company_reg) or None
                    is_pj = 1
                else:
                    denumire = clean_web_text(
                        f"{order.billing.firstname} {order.billing.lastname}"
                    )
                    cod_fiscal = None
                    registru = None
                    is_pj = 0

                cur.callproc("PACK_IMPORT_PARTENERI.cauta_sau_creeaza_partener", [
                    cod_fiscal, denumire, registru, is_pj, id_partener
                ])

                partner_id = id_partener.getvalue()
                if not partner_id or partner_id <= 0:
                    result["error"] = f"Partner creation failed for {denumire}"
                    return result

                result["id_partener"] = int(partner_id)

                # Step 2: Process billing address
                id_adresa_fact = cur.var(oracledb.DB_TYPE_NUMBER)
                billing_addr = format_address_for_oracle(
                    order.billing.address, order.billing.city, order.billing.region
                )
                cur.callproc("PACK_IMPORT_PARTENERI.cauta_sau_creeaza_adresa", [
                    partner_id, billing_addr,
                    order.billing.phone or "",
                    order.billing.email or "",
                    id_adresa_fact
                ])
                addr_fact_id = id_adresa_fact.getvalue()

                # Step 3: Process shipping address (only when the order
                # carries shipping data; otherwise NULL is passed through)
                addr_livr_id = None
                if order.shipping:
                    id_adresa_livr = cur.var(oracledb.DB_TYPE_NUMBER)
                    shipping_addr = format_address_for_oracle(
                        order.shipping.address, order.shipping.city,
                        order.shipping.region
                    )
                    cur.callproc("PACK_IMPORT_PARTENERI.cauta_sau_creeaza_adresa", [
                        partner_id, shipping_addr,
                        order.shipping.phone or "",
                        order.shipping.email or "",
                        id_adresa_livr
                    ])
                    addr_livr_id = id_adresa_livr.getvalue()

                # Step 4: Build articles JSON and import order
                articles_json = build_articles_json(order.items)

                # Bind the JSON through a CLOB so large orders are not
                # limited by VARCHAR2 bind sizes
                clob_var = cur.var(oracledb.DB_TYPE_CLOB)
                clob_var.setvalue(0, articles_json)

                id_comanda = cur.var(oracledb.DB_TYPE_NUMBER)

                cur.callproc("PACK_IMPORT_COMENZI.importa_comanda", [
                    order_number,   # p_nr_comanda_ext
                    order_date,     # p_data_comanda
                    partner_id,     # p_id_partener
                    clob_var,       # p_json_articole (CLOB)
                    addr_livr_id,   # p_id_adresa_livrare
                    addr_fact_id,   # p_id_adresa_facturare
                    id_pol,         # p_id_pol
                    id_sectie,      # p_id_sectie
                    id_comanda      # v_id_comanda (OUT)
                ])

                comanda_id = id_comanda.getvalue()

                if comanda_id and comanda_id > 0:
                    conn.commit()
                    result["success"] = True
                    result["id_comanda"] = int(comanda_id)
                    logger.info(f"Order {order_number} imported: ID={comanda_id}")
                else:
                    conn.rollback()
                    result["error"] = "importa_comanda returned invalid ID"

    except oracledb.DatabaseError as e:
        error_msg = str(e)
        result["error"] = error_msg
        logger.error(f"Oracle error importing order {order.number}: {error_msg}")
    except Exception as e:
        result["error"] = str(e)
        logger.error(f"Error importing order {order.number}: {e}")

    return result
|
||||
188
api/app/services/mapping_service.py
Normal file
188
api/app/services/mapping_service.py
Normal file
@@ -0,0 +1,188 @@
|
||||
import oracledb
|
||||
import csv
|
||||
import io
|
||||
import logging
|
||||
from fastapi import HTTPException
|
||||
from .. import database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def get_mappings(search: str = "", page: int = 1, per_page: int = 50):
    """Get paginated mappings with optional search.

    Rows come from ARTICOLE_TERTI left-joined to NOM_ARTICOLE so the
    article name (denumire) is included when the codmat exists there.
    `search` filters case-insensitively by substring on sku, codmat or
    denumire.

    Returns a dict: mappings (list of row dicts with lower-cased column
    names), total, page, per_page, pages (ceiling of total / per_page).

    Raises HTTPException 503 when the Oracle pool is unavailable.
    """
    if database.pool is None:
        raise HTTPException(status_code=503, detail="Oracle unavailable")

    offset = (page - 1) * per_page

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            # Build WHERE clause. The user value only ever travels through
            # the :search bind variable; only this fixed WHERE text is
            # interpolated into the f-string SQL below (no injection).
            where = ""
            params = {}
            if search:
                where = """WHERE (UPPER(at.sku) LIKE '%' || UPPER(:search) || '%'
                        OR UPPER(at.codmat) LIKE '%' || UPPER(:search) || '%'
                        OR UPPER(na.denumire) LIKE '%' || UPPER(:search) || '%')"""
                params["search"] = search

            # Count total (same join + filter as the data query)
            count_sql = f"""
                SELECT COUNT(*) FROM ARTICOLE_TERTI at
                LEFT JOIN nom_articole na ON na.codmat = at.codmat
                {where}
            """
            cur.execute(count_sql, params)
            total = cur.fetchone()[0]

            # Get page (OFFSET ... FETCH NEXT requires Oracle 12c+)
            data_sql = f"""
                SELECT at.sku, at.codmat, na.denumire, at.cantitate_roa,
                       at.procent_pret, at.activ,
                       TO_CHAR(at.data_creare, 'YYYY-MM-DD HH24:MI') as data_creare
                FROM ARTICOLE_TERTI at
                LEFT JOIN nom_articole na ON na.codmat = at.codmat
                {where}
                ORDER BY at.sku, at.codmat
                OFFSET :offset ROWS FETCH NEXT :per_page ROWS ONLY
            """
            params["offset"] = offset
            params["per_page"] = per_page
            cur.execute(data_sql, params)

            columns = [col[0].lower() for col in cur.description]
            rows = [dict(zip(columns, row)) for row in cur.fetchall()]

    return {
        "mappings": rows,
        "total": total,
        "page": page,
        "per_page": per_page,
        "pages": (total + per_page - 1) // per_page
    }
|
||||
|
||||
def create_mapping(sku: str, codmat: str, cantitate_roa: float = 1, procent_pret: float = 100):
    """Create a new mapping.

    Inserts an active (activ = 1) SKU->codmat row stamped with SYSDATE;
    id_util_creare -3 marks rows created by this service.
    """
    if database.pool is None:
        raise HTTPException(status_code=503, detail="Oracle unavailable")

    bind = {
        "sku": sku,
        "codmat": codmat,
        "cantitate_roa": cantitate_roa,
        "procent_pret": procent_pret,
    }
    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            cur.execute("""
                INSERT INTO ARTICOLE_TERTI (sku, codmat, cantitate_roa, procent_pret, activ, data_creare, id_util_creare)
                VALUES (:sku, :codmat, :cantitate_roa, :procent_pret, 1, SYSDATE, -3)
            """, bind)
            conn.commit()
    return {"sku": sku, "codmat": codmat}
|
||||
|
||||
def update_mapping(sku: str, codmat: str, cantitate_roa: float = None, procent_pret: float = None, activ: int = None):
    """Update an existing mapping.

    Only fields passed as non-None are changed; data_modif is stamped on
    every update. Returns True when a row was modified, False when no
    fields were given or the (sku, codmat) pair does not exist.
    """
    if database.pool is None:
        raise HTTPException(status_code=503, detail="Oracle unavailable")

    candidate = {
        "cantitate_roa": cantitate_roa,
        "procent_pret": procent_pret,
        "activ": activ,
    }
    changes = {col: val for col, val in candidate.items() if val is not None}
    if not changes:
        return False

    assignments = [f"{col} = :{col}" for col in changes]
    assignments.append("data_modif = SYSDATE")
    bind = {"sku": sku, "codmat": codmat, **changes}

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            cur.execute(f"""
                UPDATE ARTICOLE_TERTI SET {", ".join(assignments)}
                WHERE sku = :sku AND codmat = :codmat
            """, bind)
            conn.commit()
            return cur.rowcount > 0
|
||||
|
||||
def delete_mapping(sku: str, codmat: str):
    """Soft delete: deactivate the mapping (activ = 0) rather than removing the row."""
    return update_mapping(sku, codmat, activ=0)
|
||||
|
||||
def import_csv(file_content: str):
    """Import mappings from CSV content (upsert via MERGE).

    Expected columns: sku, codmat, plus optional cantitate_roa
    (default 1) and procent_pret (default 100). Each row is upserted
    into ARTICOLE_TERTI and re-activated (activ = 1); one commit is
    issued at the end. Row-level failures are collected, not raised.

    Returns {"processed": <rows merged>, "errors": [<row messages>]}.

    Raises HTTPException 503 when the Oracle pool is unavailable.
    """
    if database.pool is None:
        raise HTTPException(status_code=503, detail="Oracle unavailable")

    reader = csv.DictReader(io.StringIO(file_content))
    processed = 0
    errors = []

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            for i, row in enumerate(reader, 1):
                try:
                    # `or ""` also covers short rows where DictReader
                    # yields None for a missing column.
                    sku = (row.get("sku") or "").strip()
                    codmat = (row.get("codmat") or "").strip()
                    cantitate = float(row.get("cantitate_roa", "1") or "1")
                    procent = float(row.get("procent_pret", "100") or "100")

                    if not sku or not codmat:
                        errors.append(f"Row {i}: missing sku or codmat")
                        continue

                    # Upsert: update + reactivate when the (sku, codmat)
                    # pair exists, insert otherwise.
                    cur.execute("""
                        MERGE INTO ARTICOLE_TERTI t
                        USING (SELECT :sku AS sku, :codmat AS codmat FROM DUAL) s
                        ON (t.sku = s.sku AND t.codmat = s.codmat)
                        WHEN MATCHED THEN UPDATE SET
                            cantitate_roa = :cantitate_roa,
                            procent_pret = :procent_pret,
                            activ = 1,
                            data_modif = SYSDATE
                        WHEN NOT MATCHED THEN INSERT
                            (sku, codmat, cantitate_roa, procent_pret, activ, data_creare, id_util_creare)
                            VALUES (:sku, :codmat, :cantitate_roa, :procent_pret, 1, SYSDATE, -3)
                    """, {"sku": sku, "codmat": codmat, "cantitate_roa": cantitate, "procent_pret": procent})

                    processed += 1

                except Exception as e:
                    errors.append(f"Row {i}: {str(e)}")

        conn.commit()

    return {"processed": processed, "errors": errors}
|
||||
|
||||
def export_csv():
    """Export all mappings as CSV string (header + one row per mapping)."""
    if database.pool is None:
        raise HTTPException(status_code=503, detail="Oracle unavailable")

    buf = io.StringIO()
    csv_out = csv.writer(buf)
    csv_out.writerow(["sku", "codmat", "cantitate_roa", "procent_pret", "activ"])

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            cur.execute("""
                SELECT sku, codmat, cantitate_roa, procent_pret, activ
                FROM ARTICOLE_TERTI ORDER BY sku, codmat
            """)
            # The cursor iterates row tuples, which writerows consumes directly.
            csv_out.writerows(cur)

    return buf.getvalue()
|
||||
|
||||
def get_csv_template():
    """Return a CSV template: the expected header plus one example row."""
    buf = io.StringIO()
    csv.writer(buf).writerows([
        ["sku", "codmat", "cantitate_roa", "procent_pret"],
        ["EXAMPLE_SKU", "EXAMPLE_CODMAT", "1", "100"],
    ])
    return buf.getvalue()
|
||||
178
api/app/services/order_reader.py
Normal file
178
api/app/services/order_reader.py
Normal file
@@ -0,0 +1,178 @@
|
||||
import json
|
||||
import glob
|
||||
import os
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional
|
||||
|
||||
from ..config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@dataclass
class OrderItem:
    """One order line parsed from a GoMag JSON order (see _parse_order)."""
    sku: str        # web-shop SKU; keys the ARTICOLE_TERTI mapping lookup
    name: str       # product display name, raw (cleaned later by the importer)
    price: float
    quantity: float
    vat: float
|
||||
|
||||
@dataclass
class OrderBilling:
    """Billing-side customer data for an order.

    The company_* fields are populated only when is_company is True
    (the JSON carried a "company" object with a non-empty name).
    """
    firstname: str = ""
    lastname: str = ""
    phone: str = ""
    email: str = ""
    address: str = ""
    city: str = ""
    region: str = ""
    country: str = ""
    company_name: str = ""
    company_code: str = ""   # used as the fiscal code (cod_fiscal) on import
    company_reg: str = ""    # trade-register number (registru) on import
    is_company: bool = False
|
||||
|
||||
@dataclass
class OrderShipping:
    """Shipping-side contact data; an order only has one when the JSON
    carries a "shipping" object (OrderData.shipping is None otherwise)."""
    firstname: str = ""
    lastname: str = ""
    phone: str = ""
    email: str = ""
    address: str = ""
    city: str = ""
    region: str = ""
    country: str = ""
|
||||
|
||||
@dataclass
class OrderData:
    """Normalized GoMag order as produced by _parse_order."""
    id: str
    number: str                # external order number (p_nr_comanda_ext on import)
    date: str                  # raw date string; parsed later by convert_web_date
    status: str = ""
    status_id: str = ""
    items: list = field(default_factory=list)  # list of OrderItem
    billing: OrderBilling = field(default_factory=OrderBilling)
    shipping: Optional[OrderShipping] = None   # None when the JSON has no shipping block
    payment_name: str = ""
    delivery_name: str = ""
    source_file: str = ""      # basename of the JSON file the order came from
|
||||
|
||||
def read_json_orders(json_dir: str = None) -> tuple[list[OrderData], int]:
    """Read all GoMag order JSON files from the output directory.

    Falls back to settings.JSON_OUTPUT_DIR when no directory is given.
    Per-file and per-order failures are logged and skipped, never raised.

    Returns (list of OrderData, number of JSON files read).
    """
    directory = json_dir if json_dir is not None else settings.JSON_OUTPUT_DIR

    if not directory or not os.path.isdir(directory):
        logger.warning(f"JSON output directory not found: {directory}")
        return [], 0

    matched = sorted(glob.glob(os.path.join(directory, "gomag_orders*.json")))
    if not matched:
        logger.info(f"No JSON files found in {directory}")
        return [], 0

    parsed = []
    for path in matched:
        try:
            with open(path, 'r', encoding='utf-8') as fh:
                payload = json.load(fh)

            order_map = payload.get("orders", {})
            if not isinstance(order_map, dict):
                continue

            for oid, raw in order_map.items():
                try:
                    parsed.append(_parse_order(oid, raw, os.path.basename(path)))
                except Exception as e:
                    logger.warning(f"Error parsing order {oid} from {path}: {e}")
        except Exception as e:
            logger.error(f"Error reading {path}: {e}")

    logger.info(f"Read {len(parsed)} orders from {len(matched)} JSON files")
    return parsed, len(matched)
|
||||
|
||||
def _parse_order(order_id: str, data: dict, source_file: str) -> OrderData:
    """Parse a single order from JSON data.

    Defensive by design: every field is coerced via str()/float() with a
    default, since the upstream GoMag export is not schema-validated.
    Items without an "sku" key are dropped entirely.
    """
    # Parse items
    items = []
    raw_items = data.get("items", [])
    if isinstance(raw_items, list):
        for item in raw_items:
            if isinstance(item, dict) and item.get("sku"):
                items.append(OrderItem(
                    sku=str(item.get("sku", "")).strip(),
                    name=str(item.get("name", "")),
                    price=float(item.get("price", 0) or 0),
                    quantity=float(item.get("quantity", 0) or 0),
                    vat=float(item.get("vat", 0) or 0)
                ))

    # Parse billing. An order counts as a company order only when the
    # "company" sub-object exists and carries a non-empty name.
    billing_data = data.get("billing", {}) or {}
    company = billing_data.get("company")
    is_company = isinstance(company, dict) and bool(company.get("name"))

    billing = OrderBilling(
        firstname=str(billing_data.get("firstname", "")),
        lastname=str(billing_data.get("lastname", "")),
        phone=str(billing_data.get("phone", "")),
        email=str(billing_data.get("email", "")),
        address=str(billing_data.get("address", "")),
        city=str(billing_data.get("city", "")),
        region=str(billing_data.get("region", "")),
        country=str(billing_data.get("country", "")),
        company_name=str(company.get("name", "")) if is_company else "",
        company_code=str(company.get("code", "")) if is_company else "",
        company_reg=str(company.get("registrationNo", "")) if is_company else "",
        is_company=is_company
    )

    # Parse shipping (optional: left as None when absent or malformed)
    shipping_data = data.get("shipping")
    shipping = None
    if isinstance(shipping_data, dict):
        shipping = OrderShipping(
            firstname=str(shipping_data.get("firstname", "")),
            lastname=str(shipping_data.get("lastname", "")),
            phone=str(shipping_data.get("phone", "")),
            email=str(shipping_data.get("email", "")),
            address=str(shipping_data.get("address", "")),
            city=str(shipping_data.get("city", "")),
            region=str(shipping_data.get("region", "")),
            country=str(shipping_data.get("country", ""))
        )

    # Payment/delivery
    payment = data.get("payment", {}) or {}
    delivery = data.get("delivery", {}) or {}

    return OrderData(
        id=str(data.get("id", order_id)),
        number=str(data.get("number", "")),
        date=str(data.get("date", "")),
        status=str(data.get("status", "")),
        status_id=str(data.get("statusId", "")),
        items=items,
        billing=billing,
        shipping=shipping,
        payment_name=str(payment.get("name", "")) if isinstance(payment, dict) else "",
        delivery_name=str(delivery.get("name", "")) if isinstance(delivery, dict) else "",
        source_file=source_file
    )
|
||||
|
||||
def get_all_skus(orders: list[OrderData]) -> set[str]:
    """Extract unique SKUs from all orders (empty SKUs excluded)."""
    return {
        item.sku
        for order in orders
        for item in order.items
        if item.sku
    }
|
||||
71
api/app/services/scheduler_service.py
Normal file
71
api/app/services/scheduler_service.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import logging
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from apscheduler.triggers.interval import IntervalTrigger
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Module-level scheduler state: the AsyncIOScheduler instance (created
# lazily by init_scheduler) and whether the periodic sync job is active.
_scheduler = None
_is_running = False
|
||||
|
||||
|
||||
def init_scheduler():
    """Create the module-level AsyncIOScheduler instance (does not start it)."""
    global _scheduler
    _scheduler = AsyncIOScheduler()
    logger.info("Scheduler initialized")
|
||||
|
||||
|
||||
def start_scheduler(interval_minutes: int = 5):
    """(Re)start periodic syncing at the given interval in minutes."""
    global _is_running
    if _scheduler is None:
        init_scheduler()

    # Drop any previously scheduled sync job before rescheduling
    if _scheduler.get_job("sync_job"):
        _scheduler.remove_job("sync_job")

    # Imported lazily to avoid a circular import at module load time
    from . import sync_service

    _scheduler.add_job(
        sync_service.run_sync,
        trigger=IntervalTrigger(minutes=interval_minutes),
        id="sync_job",
        name="GoMag Sync",
        replace_existing=True,
    )

    if not _scheduler.running:
        _scheduler.start()

    _is_running = True
    logger.info(f"Scheduler started with interval {interval_minutes}min")
|
||||
|
||||
|
||||
def stop_scheduler():
    """Stop periodic syncing.

    Removes the sync job when present, and always clears the running
    flag — previously the flag could stay True when stop was called
    while the scheduler was not running, leaving status reporting stale.
    """
    global _is_running
    if _scheduler and _scheduler.running:
        if _scheduler.get_job("sync_job"):
            _scheduler.remove_job("sync_job")
    _is_running = False
    logger.info("Scheduler stopped")
|
||||
|
||||
|
||||
def shutdown_scheduler():
    """Tear the scheduler down completely and reset module state."""
    global _scheduler, _is_running
    if _scheduler is not None and _scheduler.running:
        # wait=False: do not block app shutdown on in-flight jobs
        _scheduler.shutdown(wait=False)
    _scheduler = None
    _is_running = False
|
||||
|
||||
|
||||
def get_scheduler_status():
    """Report whether periodic sync is enabled, plus next run time and interval."""
    job = _scheduler.get_job("sync_job") if _scheduler else None

    next_run = None
    interval = None
    if job is not None:
        if job.next_run_time:
            next_run = job.next_run_time.isoformat()
        interval = int(job.trigger.interval.total_seconds() / 60)

    return {
        "enabled": _is_running,
        "next_run": next_run,
        "interval_minutes": interval,
    }
|
||||
206
api/app/services/sqlite_service.py
Normal file
206
api/app/services/sqlite_service.py
Normal file
@@ -0,0 +1,206 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from ..database import get_sqlite, get_sqlite_sync
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def create_sync_run(run_id: str, json_files: int = 0):
    """Insert a new sync run row in 'running' state."""
    conn = await get_sqlite()
    try:
        await conn.execute("""
            INSERT INTO sync_runs (run_id, started_at, status, json_files)
            VALUES (?, datetime('now'), 'running', ?)
        """, (run_id, json_files))
        await conn.commit()
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def update_sync_run(run_id: str, status: str, total_orders: int = 0,
                          imported: int = 0, skipped: int = 0, errors: int = 0):
    """Close out a sync run: stamp finished_at and store the final counters."""
    conn = await get_sqlite()
    try:
        await conn.execute("""
            UPDATE sync_runs SET
                finished_at = datetime('now'),
                status = ?,
                total_orders = ?,
                imported = ?,
                skipped = ?,
                errors = ?
            WHERE run_id = ?
        """, (status, total_orders, imported, skipped, errors, run_id))
        await conn.commit()
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def add_import_order(sync_run_id: str, order_number: str, order_date: str,
                           customer_name: str, status: str, id_comanda: int = None,
                           id_partener: int = None, error_message: str = None,
                           missing_skus: list = None, items_count: int = 0):
    """Record the outcome of one order within a sync run.

    missing_skus (when given) is stored as a JSON-encoded list.
    """
    conn = await get_sqlite()
    try:
        skus_json = json.dumps(missing_skus) if missing_skus else None
        await conn.execute("""
            INSERT INTO import_orders
                (sync_run_id, order_number, order_date, customer_name, status,
                 id_comanda, id_partener, error_message, missing_skus, items_count)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """, (sync_run_id, order_number, order_date, customer_name, status,
              id_comanda, id_partener, error_message, skus_json, items_count))
        await conn.commit()
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def track_missing_sku(sku: str, product_name: str = ""):
    """Record an unmapped SKU; duplicates are ignored (INSERT OR IGNORE)."""
    conn = await get_sqlite()
    try:
        await conn.execute("""
            INSERT OR IGNORE INTO missing_skus (sku, product_name)
            VALUES (?, ?)
        """, (sku, product_name))
        await conn.commit()
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def resolve_missing_sku(sku: str):
    """Flag a previously missing SKU as resolved, stamping resolved_at."""
    conn = await get_sqlite()
    try:
        await conn.execute("""
            UPDATE missing_skus SET resolved = 1, resolved_at = datetime('now')
            WHERE sku = ?
        """, (sku,))
        await conn.commit()
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def get_sync_runs(page: int = 1, per_page: int = 20):
    """Return one page of sync run history, newest first, plus pagination info."""
    conn = await get_sqlite()
    try:
        cur = await conn.execute("SELECT COUNT(*) FROM sync_runs")
        total = (await cur.fetchone())[0]

        cur = await conn.execute("""
            SELECT * FROM sync_runs
            ORDER BY started_at DESC
            LIMIT ? OFFSET ?
        """, (per_page, (page - 1) * per_page))
        runs = [dict(r) for r in await cur.fetchall()]

        return {
            "runs": runs,
            "total": total,
            "page": page,
            "pages": (total + per_page - 1) // per_page if total > 0 else 0,
        }
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def get_sync_run_detail(run_id: str):
    """Return one sync run plus all of its order records, or None when unknown."""
    conn = await get_sqlite()
    try:
        cur = await conn.execute(
            "SELECT * FROM sync_runs WHERE run_id = ?", (run_id,)
        )
        run_row = await cur.fetchone()
        if run_row is None:
            return None

        cur = await conn.execute("""
            SELECT * FROM import_orders
            WHERE sync_run_id = ?
            ORDER BY created_at
        """, (run_id,))
        order_rows = await cur.fetchall()

        return {
            "run": dict(run_row),
            "orders": [dict(o) for o in order_rows],
        }
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def get_dashboard_stats():
    """Get stats for the dashboard.

    Returns counts of imported/skipped/error orders, the number of
    unresolved missing SKUs, and the most recent sync run (or None).
    """
    db = await get_sqlite()
    try:
        # One grouped query replaces the previous three separate COUNT
        # round trips over import_orders.
        cursor = await db.execute(
            "SELECT status, COUNT(*) FROM import_orders GROUP BY status"
        )
        counts = {row[0]: row[1] for row in await cursor.fetchall()}

        # Missing SKUs (unresolved)
        cursor = await db.execute(
            "SELECT COUNT(*) FROM missing_skus WHERE resolved = 0"
        )
        missing = (await cursor.fetchone())[0]

        # Last sync run
        cursor = await db.execute(
            "SELECT * FROM sync_runs ORDER BY started_at DESC LIMIT 1"
        )
        last_run = await cursor.fetchone()

        return {
            "imported": counts.get("IMPORTED", 0),
            "skipped": counts.get("SKIPPED", 0),
            "errors": counts.get("ERROR", 0),
            "missing_skus": missing,
            "last_run": dict(last_run) if last_run else None
        }
    finally:
        await db.close()
|
||||
|
||||
|
||||
async def get_scheduler_config():
    """Load the scheduler key/value configuration from SQLite as a plain dict."""
    conn = await get_sqlite()
    try:
        cur = await conn.execute("SELECT key, value FROM scheduler_config")
        entries = await cur.fetchall()
        return {entry["key"]: entry["value"] for entry in entries}
    finally:
        await conn.close()
|
||||
|
||||
|
||||
async def set_scheduler_config(key: str, value: str):
    """Upsert one scheduler configuration entry (INSERT OR REPLACE)."""
    conn = await get_sqlite()
    try:
        await conn.execute("""
            INSERT OR REPLACE INTO scheduler_config (key, value)
            VALUES (?, ?)
        """, (key, value))
        await conn.commit()
    finally:
        await conn.close()
|
||||
165
api/app/services/sync_service.py
Normal file
165
api/app/services/sync_service.py
Normal file
@@ -0,0 +1,165 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from . import order_reader, validation_service, import_service, sqlite_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Sync state: the lock guarantees at most one sync cycle at a time
_sync_lock = asyncio.Lock()
_current_sync = None  # dict with run_id, status, progress info
|
||||
|
||||
|
||||
async def get_sync_status():
    """Return a snapshot copy of the in-flight sync state, or {'status': 'idle'}."""
    if not _current_sync:
        return {"status": "idle"}
    return dict(_current_sync)
|
||||
|
||||
|
||||
async def run_sync(id_pol: int | None = None, id_sectie: int | None = None) -> dict:
    """Run a full sync cycle. Returns summary dict.

    Pipeline: read JSON orders -> validate SKUs against Oracle ->
    import the valid orders -> record every outcome in SQLite.

    Args:
        id_pol: optional id forwarded verbatim to
            ``import_service.import_single_order`` — semantics owned by
            that service (TODO confirm meaning against import_service).
        id_sectie: optional section id, forwarded the same way.

    Returns:
        A summary dict. On success: run_id, status, json_files,
        total_orders, imported, skipped, errors, missing_skus.
        If a sync is already running: ``{"error": ...}``.
        On unhandled failure: run_id, status="failed", error message.
    """
    global _current_sync

    # Reject re-entry: only one sync may run at a time.
    # NOTE(review): locked() check + acquire is not atomic across await
    # points; a second caller could slip past the check and then block on
    # the lock, running sequentially instead of being rejected — confirm
    # this is acceptable.
    if _sync_lock.locked():
        return {"error": "Sync already running"}

    async with _sync_lock:
        # Run id: timestamp plus a short random suffix for uniqueness.
        run_id = datetime.now().strftime("%Y%m%d_%H%M%S") + "_" + uuid.uuid4().hex[:6]
        _current_sync = {
            "run_id": run_id,
            "status": "running",
            "started_at": datetime.now().isoformat(),
            "progress": "Reading JSON files..."
        }

        try:
            # Step 1: Read orders
            orders, json_count = order_reader.read_json_orders()
            await sqlite_service.create_sync_run(run_id, json_count)

            if not orders:
                # Nothing to do: close the run record and report an empty cycle.
                await sqlite_service.update_sync_run(run_id, "completed", 0, 0, 0, 0)
                _current_sync = None
                return {
                    "run_id": run_id,
                    "status": "completed",
                    "message": "No orders found",
                    "json_files": json_count
                }

            _current_sync["progress"] = f"Validating {len(orders)} orders..."

            # Step 2: Validate SKUs (blocking Oracle call -> run in thread)
            all_skus = order_reader.get_all_skus(orders)
            validation = await asyncio.to_thread(validation_service.validate_skus, all_skus)
            importable, skipped = validation_service.classify_orders(orders, validation)

            # Track missing SKUs
            # For each unknown SKU, grab the first matching item name (if any)
            # so the UI has something human-readable; empty string otherwise.
            for sku in validation["missing"]:
                product_name = ""
                for order in orders:
                    for item in order.items:
                        if item.sku == sku:
                            product_name = item.name
                            break
                    if product_name:
                        break
                await sqlite_service.track_missing_sku(sku, product_name)

            # Step 3: Record skipped orders
            for order, missing_skus in skipped:
                # Prefer the company name; fall back to the person's full name.
                customer = order.billing.company_name or \
                           f"{order.billing.firstname} {order.billing.lastname}"
                await sqlite_service.add_import_order(
                    sync_run_id=run_id,
                    order_number=order.number,
                    order_date=order.date,
                    customer_name=customer,
                    status="SKIPPED",
                    missing_skus=missing_skus,
                    items_count=len(order.items)
                )

            # Step 4: Import valid orders
            imported_count = 0
            error_count = 0

            for i, order in enumerate(importable):
                _current_sync["progress"] = f"Importing {i+1}/{len(importable)}: #{order.number}"

                # Blocking Oracle import -> keep it off the event loop.
                result = await asyncio.to_thread(
                    import_service.import_single_order,
                    order, id_pol=id_pol, id_sectie=id_sectie
                )
                customer = order.billing.company_name or \
                           f"{order.billing.firstname} {order.billing.lastname}"

                if result["success"]:
                    imported_count += 1
                    await sqlite_service.add_import_order(
                        sync_run_id=run_id,
                        order_number=order.number,
                        order_date=order.date,
                        customer_name=customer,
                        status="IMPORTED",
                        id_comanda=result["id_comanda"],
                        id_partener=result["id_partener"],
                        items_count=len(order.items)
                    )
                else:
                    error_count += 1
                    await sqlite_service.add_import_order(
                        sync_run_id=run_id,
                        order_number=order.number,
                        order_date=order.date,
                        customer_name=customer,
                        status="ERROR",
                        id_partener=result.get("id_partener"),
                        error_message=result["error"],
                        items_count=len(order.items)
                    )

                # Safety: stop if too many errors
                if error_count > 10:
                    logger.warning("Too many errors, stopping sync")
                    break

            # Step 5: Update sync run
            # More than 10 errors means the loop bailed out early -> "failed".
            status = "completed" if error_count <= 10 else "failed"
            await sqlite_service.update_sync_run(
                run_id, status, len(orders), imported_count, len(skipped), error_count
            )

            summary = {
                "run_id": run_id,
                "status": status,
                "json_files": json_count,
                "total_orders": len(orders),
                "imported": imported_count,
                "skipped": len(skipped),
                "errors": error_count,
                "missing_skus": len(validation["missing"])
            }

            logger.info(
                f"Sync {run_id} completed: {imported_count} imported, "
                f"{len(skipped)} skipped, {error_count} errors"
            )
            return summary

        except Exception as e:
            # Any unhandled failure marks the whole run as failed (1 error).
            logger.error(f"Sync {run_id} failed: {e}")
            await sqlite_service.update_sync_run(run_id, "failed", 0, 0, 0, 1)
            return {"run_id": run_id, "status": "failed", "error": str(e)}
        finally:
            # Always clear the in-flight marker, including the early returns.
            _current_sync = None
|
||||
|
||||
|
||||
def stop_sync():
    """Request that a running sync stop.

    Currently a no-op: sync runs are not cancellable mid-flight. A future
    version could use an asyncio.Event for cooperative cancellation.
    """
    return None
|
||||
71
api/app/services/validation_service.py
Normal file
71
api/app/services/validation_service.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import logging
|
||||
from .. import database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def validate_skus(skus: set[str]) -> dict:
    """Check a set of SKUs against Oracle and bucket each one.

    Returns a dict of three sets:
      - "mapped":  SKUs present in ARTICOLE_TERTI with activ = 1
      - "direct":  SKUs matching NOM_ARTICOLE.codmat (and not mapped)
      - "missing": SKUs found in neither table
    """
    if not skus:
        return {"mapped": set(), "direct": set(), "missing": set()}

    mapped = set()
    direct = set()
    pending = list(skus)

    with database.pool.acquire() as conn:
        with conn.cursor() as cur:
            # Probe in batches of 500 to stay under Oracle's IN-list limits.
            for start in range(0, len(pending), 500):
                chunk = pending[start:start + 500]
                binds = {f"s{idx}": value for idx, value in enumerate(chunk)}
                in_list = ",".join(f":s{idx}" for idx in range(len(chunk)))

                # First: active third-party mappings.
                cur.execute(f"""
                    SELECT DISTINCT sku FROM ARTICOLE_TERTI
                    WHERE sku IN ({in_list}) AND activ = 1
                """, binds)
                for record in cur:
                    mapped.add(record[0])

                # Anything not mapped may still match an article code directly.
                leftovers = [value for value in chunk if value not in mapped]
                if leftovers:
                    binds2 = {f"n{idx}": value for idx, value in enumerate(leftovers)}
                    in_list2 = ",".join(f":n{idx}" for idx in range(len(leftovers)))
                    cur.execute(f"""
                        SELECT DISTINCT codmat FROM NOM_ARTICOLE
                        WHERE codmat IN ({in_list2})
                    """, binds2)
                    for record in cur:
                        direct.add(record[0])

    missing = skus - mapped - direct

    logger.info(f"SKU validation: {len(mapped)} mapped, {len(direct)} direct, {len(missing)} missing")
    return {"mapped": mapped, "direct": direct, "missing": missing}
|
||||
|
||||
def classify_orders(orders, validation_result):
    """Split orders into importable vs skipped based on SKU validation.

    An order is importable only when every non-empty item SKU is known
    (mapped or direct). Returns ``(importable_orders, skipped_orders)``
    where each skipped entry is ``(order, list_of_missing_skus)``.
    """
    known = validation_result["mapped"] | validation_result["direct"]
    importable = []
    skipped = []

    for order in orders:
        # Items with a falsy SKU are ignored for validation purposes.
        present = {item.sku for item in order.items if item.sku}
        unknown = present - known

        if not unknown:
            importable.append(order)
        else:
            skipped.append((order, list(unknown)))

    return importable, skipped
|
||||
Reference in New Issue
Block a user