Files
gomag-vending/scripts/parse_sync_log.py
2026-03-11 12:32:25 +02:00

307 lines
9.4 KiB
Python

#!/usr/bin/env python3
"""
Parser pentru log-urile sync_comenzi_web.
Extrage comenzi esuate, SKU-uri lipsa, si genereaza un sumar.
Suporta atat formatul vechi (verbose) cat si formatul nou (compact).
Utilizare:
python parse_sync_log.py # Ultimul log din vfp/log/
python parse_sync_log.py <fisier.log> # Log specific
python parse_sync_log.py --skus # Doar lista SKU-uri lipsa
python parse_sync_log.py --dir /path/to/logs # Director custom
"""
import os
import sys
import re
import glob
import argparse
# Regex for timestamped lines (start of a new log entry): "[HH:MM:SS] [LEVEL] text"
RE_TIMESTAMP = re.compile(r'^\[(\d{2}:\d{2}:\d{2})\]\s+\[(\w+\s*)\]\s*(.*)')
# NEW (compact) format: [N/Total] OrderNumber P:X A:Y/Z -> OK/ERR details
RE_COMPACT_OK = re.compile(r'\[(\d+)/(\d+)\]\s+(\S+)\s+.*->\s+OK\s+ID:(\S+)')
RE_COMPACT_ERR = re.compile(r'\[(\d+)/(\d+)\]\s+(\S+)\s+.*->\s+ERR\s+(.*)')
# OLD (verbose) format, kept for backwards compatibility.
# These patterns match Romanian log messages.
RE_SKU_NOT_FOUND = re.compile(r'SKU negasit.*?:\s*(\S+)')  # "SKU not found ...: <sku>"
RE_PRICE_POLICY = re.compile(r'Pretul pentru acest articol nu a fost gasit')  # "price for this article was not found"
RE_FAILED_ORDER = re.compile(r'Import comanda esuat pentru\s+(\S+)')  # "order import failed for <order>"
RE_ARTICOL_ERR = re.compile(r'Eroare adaugare articol\s+(\S+)')  # "error adding article <sku>"
RE_ORDER_PROCESS = re.compile(r'Procesez comanda:\s+(\S+)\s+din\s+(\S+)')  # "processing order <nr> from <src>"
RE_ORDER_SUCCESS = re.compile(r'SUCCES: Comanda importata.*?ID Oracle:\s+(\S+)')  # "SUCCESS: order imported ... Oracle ID"
# Shared between both formats.
RE_SYNC_END = re.compile(r'SYNC END\s*\|.*?(\d+)\s+processed.*?(\d+)\s+ok.*?(\d+)\s+err')
RE_STATS_LINE = re.compile(r'Duration:\s*(\S+)\s*\|\s*Orders:\s*(\S+)')
RE_STOPPED_EARLY = re.compile(r'Peste \d+.*ero|stopped early')  # "over N errors" / explicit marker
def find_latest_log(log_dir):
    """Return the path of the newest sync_comenzi log in *log_dir*.

    Returns None when the directory contains no matching file.
    """
    candidates = glob.glob(os.path.join(log_dir, 'sync_comenzi_*.log'))
    if candidates:
        # Newest by modification time.
        return max(candidates, key=os.path.getmtime)
    return None
def parse_log_entries(lines):
    """Parse raw log lines into structured entries.

    Each entry is a dict with keys 'time', 'level', 'text', 'full' and
    'continuation'.  A line starting with a "[HH:MM:SS] [LEVEL]" prefix
    opens a new entry; any other line is appended to the current entry
    as a continuation.
    """
    parsed = []
    entry = None
    for raw in lines:
        stripped = raw.rstrip('\n\r')
        match = RE_TIMESTAMP.match(stripped)
        if match is not None:
            # A timestamp starts a new entry; flush the previous one.
            if entry:
                parsed.append(entry)
            timestamp, level, text = match.groups()
            entry = {
                'time': timestamp,
                'level': level.strip(),
                'text': text,
                'full': stripped,
                'continuation': [],
            }
            continue
        if entry is not None:
            # Continuation line: attach to the most recent entry.
            entry['continuation'].append(stripped)
            entry['text'] = entry['text'] + '\n' + stripped
    if entry:
        parsed.append(entry)
    return parsed
def extract_sku_from_error(err_text):
    """Extract an (error_type, sku) pair from an error message.

    Recognizes the new compact prefixes (SKU_NOT_FOUND:, PRICE_POLICY:)
    as well as the legacy verbose Romanian messages.  Returns
    (None, None) when no known pattern matches.
    """
    # New compact format: "SKU_NOT_FOUND: 8714858424056"
    hit = re.search(r'SKU_NOT_FOUND:\s*(\S+)', err_text)
    if hit is not None:
        return ('SKU_NOT_FOUND', hit.group(1))
    # New compact format: "PRICE_POLICY: 8000070028685"
    hit = re.search(r'PRICE_POLICY:\s*(\S+)', err_text)
    if hit is not None:
        return ('PRICE_POLICY', hit.group(1))
    # Legacy format: "SKU negasit...NOM_ARTICOLE: xxx"
    hit = RE_SKU_NOT_FOUND.search(err_text)
    if hit is not None:
        return ('SKU_NOT_FOUND', hit.group(1))
    # Legacy format: "Eroare adaugare articol xxx"
    hit = RE_ARTICOL_ERR.search(err_text)
    if hit is not None:
        return ('ARTICOL_ERROR', hit.group(1))
    # Legacy price-policy message carries no SKU at all.
    if RE_PRICE_POLICY.search(err_text):
        return ('PRICE_POLICY', '(SKU necunoscut)')
    return (None, None)
def analyze_entries(entries):
    """Analyze parsed log entries and collect sync statistics.

    Handles both the new compact log format (SYNC END / "[N/Total] ... ERR"
    lines) and the legacy verbose format.  Returns a dict with start/end
    times, duration, order counters, the failed orders as
    (order_nr, err_type, sku_or_detail) tuples, and the unique missing SKUs.
    """
    result = {
        'start_time': None,
        'end_time': None,
        'duration': None,
        'total_orders': 0,
        'success_orders': 0,
        'error_orders': 0,
        'stopped_early': False,
        'failed': [],        # (order_nr, err_type, sku_or_detail) tuples
        'missing_skus': [],  # unique SKUs, in first-seen order
    }
    seen_skus = set()        # dedup guard for missing_skus
    current_order = None     # last order number seen (legacy-format tracking)
    for entry in entries:
        text = entry['text']
        level = entry['level']
        # Track the first and last timestamps as the run's start/end.
        if entry['time']:
            if result['start_time'] is None:
                result['start_time'] = entry['time']
            result['end_time'] = entry['time']
        # NEW format: "SYNC END | ..." line with the final statistics.
        m = RE_SYNC_END.search(text)
        if m:
            result['total_orders'] = int(m.group(1))
            result['success_orders'] = int(m.group(2))
            result['error_orders'] = int(m.group(3))
        # NEW format: compact OK line — nothing to record per order.
        m = RE_COMPACT_OK.search(text)
        if m:
            continue
        # NEW format: compact ERR line — record failure and any missing SKU.
        m = RE_COMPACT_ERR.search(text)
        if m:
            order_nr = m.group(3)
            err_detail = m.group(4).strip()
            err_type, sku = extract_sku_from_error(err_detail)
            if err_type and sku:
                result['failed'].append((order_nr, err_type, sku))
                if sku not in seen_skus and sku != '(SKU necunoscut)':
                    seen_skus.add(sku)
                    result['missing_skus'].append(sku)
            else:
                # Unrecognized error text: keep a truncated detail string.
                result['failed'].append((order_nr, 'ERROR', err_detail[:60]))
            continue
        # Run aborted early (too many errors).
        if RE_STOPPED_EARLY.search(text):
            result['stopped_early'] = True
        # OLD format: statistics from the summary section.
        if 'Total comenzi procesate:' in text:
            try:
                result['total_orders'] = int(text.split(':')[-1].strip())
            except ValueError:
                pass
        if 'Comenzi importate cu succes:' in text:
            try:
                result['success_orders'] = int(text.split(':')[-1].strip())
            except ValueError:
                pass
        if 'Comenzi cu erori:' in text:
            try:
                result['error_orders'] = int(text.split(':')[-1].strip())
            except ValueError:
                pass
        # OLD format: "Duration: ... | Orders: ..." line.
        m = RE_STATS_LINE.search(text)
        if m:
            result['duration'] = m.group(1)
        # OLD format: ERROR entries carry the failing order number.
        if level == 'ERROR':
            m_fail = RE_FAILED_ORDER.search(text)
            if m_fail:
                current_order = m_fail.group(1)
        # Remember which order is currently being processed.
        m = RE_ORDER_PROCESS.search(text)
        if m:
            current_order = m.group(1)
        # Legacy per-article errors: attribute the SKU to the current order.
        err_type, sku = extract_sku_from_error(text)
        if err_type and sku:
            order_nr = current_order or '?'
            result['failed'].append((order_nr, err_type, sku))
            if sku not in seen_skus and sku != '(SKU necunoscut)':
                seen_skus.add(sku)
                result['missing_skus'].append(sku)
        # Duration from the SYNC END line suffix ("... | 42s").
        m = re.search(r'\|\s*(\d+)s\s*$', text)
        if m:
            result['duration'] = m.group(1) + 's'
    return result
def format_report(result, log_path):
    """Build the full human-readable report as a single string.

    *result* is the dict produced by analyze_entries(); *log_path* is
    used only for the file name shown in the header.
    """
    out = [
        '=== SYNC LOG REPORT ===',
        f'File: {os.path.basename(log_path)}',
    ]
    start = result["start_time"] or "?"
    end = result["end_time"] or "?"
    duration = result["duration"] or "?"
    out.append(f'Run: {start} - {end} ({duration})')
    out.append('')
    stopped = 'YES' if result['stopped_early'] else 'NO'
    summary = (
        f'SUMMARY: {result["total_orders"]} processed, '
        f'{result["success_orders"]} success, '
        f'{result["error_orders"]} errors '
        f'(stopped early: {stopped})'
    )
    out.append(summary)
    out.append('')
    failures = result['failed']
    if failures:
        out.append('FAILED ORDERS:')
        reported = set()
        for order_nr, err_type, sku in failures:
            row = (order_nr, err_type, sku)
            if row in reported:
                continue  # skip exact duplicates
            reported.add(row)
            out.append(f' {order_nr:<12} {err_type:<18} {sku}')
        out.append('')
    missing = result['missing_skus']
    if missing:
        out.append(f'MISSING SKUs ({len(result["missing_skus"])} unique):')
        for sku in sorted(missing):
            out.append(f' {sku}')
        out.append('')
    return '\n'.join(out)
def main():
    """CLI entry point: resolve the log file, parse it, print the output."""
    arg_parser = argparse.ArgumentParser(
        description='Parser pentru log-urile sync_comenzi_web'
    )
    arg_parser.add_argument(
        'logfile', nargs='?', default=None,
        help='Fisier log specific (default: ultimul din vfp/log/)'
    )
    arg_parser.add_argument(
        '--skus', action='store_true',
        help='Afiseaza doar lista SKU-uri lipsa (una pe linie)'
    )
    arg_parser.add_argument(
        '--dir', default=None,
        help='Director cu log-uri (default: vfp/log/ relativ la script)'
    )
    opts = arg_parser.parse_args()

    log_path = opts.logfile
    if not log_path:
        # No explicit file: locate the newest log in the chosen directory.
        if opts.dir:
            log_dir = opts.dir
        else:
            # Default: <project>/vfp/log relative to this script's parent.
            here = os.path.dirname(os.path.abspath(__file__))
            log_dir = os.path.join(os.path.dirname(here), 'vfp', 'log')
        log_path = find_latest_log(log_dir)
        if not log_path:
            print(f'Nu am gasit fisiere sync_comenzi_*.log in {log_dir}',
                  file=sys.stderr)
            sys.exit(1)
    if not os.path.isfile(log_path):
        print(f'Fisierul nu exista: {log_path}', file=sys.stderr)
        sys.exit(1)

    with open(log_path, 'r', encoding='utf-8', errors='replace') as handle:
        raw_lines = handle.readlines()
    report_data = analyze_entries(parse_log_entries(raw_lines))

    if opts.skus:
        # SKU-only mode: one SKU per line, sorted.
        for sku in sorted(report_data['missing_skus']):
            print(sku)
    else:
        print(format_report(report_data, log_path))
# Script entry point guard: run only when executed directly, not on import.
if __name__ == '__main__':
    main()