# atm/tests/test_detector.py
# Snapshot metadata: 2026-04-21 07:25:38 +03:00 · 307 lines · 10 KiB · Python
"""Tests for src/atm/detector.py."""
from __future__ import annotations
import numpy as np
import pytest
from atm.config import (
CanaryRegion,
ColorSpec,
Config,
DiscordCfg,
ROI,
TelegramCfg,
YAxisCalib,
)
from atm.detector import DetectionResult, Detector
# ---------------------------------------------------------------------------
# Constants
# ---------------------------------------------------------------------------
# ROI inside which the detector searches for the status dot.
DOT_ROI = ROI(x=10, y=10, w=280, h=80)
BG_VAL = 18 # background pixel value (18, 18, 18)
# BGR values (OpenCV convention: B, G, R)
# turquoise RGB=(0,255,255) → BGR=(255,255,0)
# yellow RGB=(255,255,0) → BGR=(0,255,255)
TURQUOISE_BGR = (255, 255, 0)
YELLOW_BGR = (0, 255, 255)
# A purple-ish colour far from every palette entry (RGB=(100,150,50));
# exercises the UNKNOWN classification path in test_unknown_not_accepted.
UNKNOWN_BGR = (50, 150, 100)
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _make_frame(*dot_specs: tuple[tuple[int, int, int], int, int]) -> np.ndarray:
    """Build a background-filled (100, 300, 3) uint8 BGR test frame.

    Every entry in *dot_specs* is ``(bgr_color, roi_x_start, roi_x_end)`` and
    paints a full-height stripe inside DOT_ROI. An end of 280 touches the
    ROI's right boundary so pixel_rgb sampling stays within the dot.
    """
    canvas = np.full((100, 300, 3), BG_VAL, dtype=np.uint8)
    top, bottom = DOT_ROI.y, DOT_ROI.y + DOT_ROI.h
    for colour, start, end in dot_specs:
        canvas[top:bottom, DOT_ROI.x + start:DOT_ROI.x + end] = colour
    return canvas
def _make_cfg(debounce_depth: int = 1) -> Config:
    """Return a minimal Config wired for detector tests.

    All palette entries share a 30.0 tolerance; only *debounce_depth* varies
    between tests.
    """
    palette = {
        name: ColorSpec(rgb=rgb, tolerance=30.0)
        for name, rgb in (
            ("turquoise", (0, 255, 255)),
            ("yellow", (255, 255, 0)),
            ("dark_green", (0, 100, 0)),
            ("dark_red", (100, 0, 0)),
            ("light_green", (0, 255, 0)),
            ("light_red", (255, 0, 0)),
            ("gray", (128, 128, 128)),
        )
    }
    return Config(
        window_title="test",
        dot_roi=DOT_ROI,
        chart_roi=ROI(x=0, y=0, w=600, h=400),
        colors=palette,
        y_axis=YAxisCalib(p1_y=0, p1_price=100.0, p2_y=400, p2_price=80.0),
        canary=CanaryRegion(
            roi=ROI(x=0, y=0, w=50, h=50),
            baseline_phash="0" * 64,
            drift_threshold=8,
        ),
        discord=DiscordCfg(webhook_url="http://example.com/hook"),
        telegram=TelegramCfg(bot_token="tok", chat_id="123"),
        debounce_depth=debounce_depth,
    )
# ---------------------------------------------------------------------------
# Tests
# ---------------------------------------------------------------------------
def test_empty_roi_no_dot() -> None:
    """A frame containing only background must report no dot."""
    blank = np.full((100, 300, 3), BG_VAL, dtype=np.uint8)
    det = Detector(_make_cfg(), capture=lambda: blank)
    result = det.step(0.0)
    assert result.window_found is True
    assert result.dot_found is False
    assert result.rgb is None
    assert result.match is None
    assert result.accepted is False
def test_rightmost_cluster() -> None:
    """With two dots at different x positions, the rightmost colour wins."""
    # turquoise on the left, yellow extending to the right ROI edge
    frame = _make_frame(
        (TURQUOISE_BGR, 50, 100),  # roi_x [50, 100)
        (YELLOW_BGR, 200, 280),  # roi_x [200, 280) → right edge
    )
    det = Detector(_make_cfg(), capture=lambda: frame)
    result = det.step(0.0)
    assert result.dot_found is True
    assert result.match is not None
    assert result.match.name == "yellow"
def test_debounce_depth_1() -> None:
    """With depth=1 a single valid frame is accepted immediately."""
    yellow_frame = _make_frame((YELLOW_BGR, 200, 280))
    det = Detector(_make_cfg(debounce_depth=1), capture=lambda: yellow_frame)
    result = det.step(0.0)
    assert result.accepted is True
    assert result.color == "yellow"
def test_debounce_depth_2() -> None:
    """With depth=2 acceptance requires two consecutive matching frames."""
    yellow_frame = _make_frame((YELLOW_BGR, 200, 280))
    det = Detector(_make_cfg(debounce_depth=2), capture=lambda: yellow_frame)
    first = det.step(0.0)
    second = det.step(1.0)
    assert first.accepted is False
    assert second.accepted is True
    assert second.color == "yellow"
def test_debounce_reset_on_change() -> None:
    """With depth=2, a colour change between frames resets acceptance."""
    sequence = iter(
        [
            _make_frame((TURQUOISE_BGR, 200, 280)),
            _make_frame((YELLOW_BGR, 200, 280)),
        ]
    )
    det = Detector(_make_cfg(debounce_depth=2), capture=lambda: next(sequence))
    first = det.step(0.0)
    second = det.step(1.0)
    assert first.accepted is False
    assert second.accepted is False
def test_unknown_not_accepted() -> None:
    """A colour outside every palette tolerance classifies as UNKNOWN."""
    frame = _make_frame((UNKNOWN_BGR, 200, 280))
    det = Detector(_make_cfg(debounce_depth=1), capture=lambda: frame)
    result = det.step(0.0)
    assert result.dot_found is True
    assert result.match is not None
    assert result.match.name == "UNKNOWN"
    assert result.accepted is False
    assert result.color is None
def test_window_lost() -> None:
    """When capture() yields None, the result carries safe defaults."""
    det = Detector(_make_cfg(), capture=lambda: None)
    result = det.step(0.0)
    assert result.window_found is False
    assert result.dot_found is False
    assert result.rgb is None
    assert result.match is None
    assert result.accepted is False
    assert result.color is None
def test_rolling_window() -> None:
    """Rolling window never exceeds 20 entries.

    The cap is asserted after *every* step — the docstring's "never exceeds"
    claim was previously only checked once at the end, where the `<= 20`
    assert was also redundant next to the `== 20` that followed it.
    """
    frame = _make_frame((YELLOW_BGR, 200, 280))
    cfg = _make_cfg()
    det = Detector(cfg, capture=lambda: frame)
    for i in range(25):
        det.step(float(i))
        # Invariant must hold mid-run, not just after 25 steps.
        assert len(det.rolling) <= 20
    # After 25 steps the window is exactly full.
    assert len(det.rolling) == 20
# ---------------------------------------------------------------------------
# Fused-blob regression: anti-aliased bridges merge adjacent dots into one
# connected component. The rightmost component's centroid then lands on an
# interior dot (wrong colour). find_rightmost_dot must anchor to the right
# edge for wide blobs so the truly-rightmost dot is sampled.
# See vision.find_rightmost_dot and logs/fires/20260420_210649_ss.png.
# ---------------------------------------------------------------------------
def _make_fused_stripe_frame(
    gray_segments: int,
    tail_bgr: tuple[int, int, int],
    seg_w: int = 13,
    stripe_h: int = 13,
) -> np.ndarray:
    """Paint one continuous stripe: N gray segments plus a tail-colour segment.

    Because the stripe is unbroken it survives 2-iter erosion as a single
    connected component — exactly the failure mode on real screenshots,
    where anti-aliased bridges fuse the whole dot row into one component.
    The centroid lands on an interior gray segment; the right edge lies
    inside the tail colour.
    """
    img = np.full((100, 300, 3), BG_VAL, dtype=np.uint8)
    top = DOT_ROI.y + (DOT_ROI.h - stripe_h) // 2
    left = DOT_ROI.x + 40
    # gray_segments gray blocks followed by one tail-colour block
    segments = [(128, 128, 128)] * gray_segments + [tail_bgr]
    for idx, colour in enumerate(segments):
        xs = left + idx * seg_w
        img[top:top + stripe_h, xs:xs + seg_w] = colour
    return img
@pytest.mark.parametrize(
    ("screenshot", "expected"),
    [
        ("logs/fires/20260420_210649_ss.png", "dark_red"),
        ("logs/fires/20260420_200603_poll.png", "dark_green"),
    ],
)
def test_real_screenshot_rightmost_dot(screenshot: str, expected: str) -> None:
    """Regression on live-capture frames where fused blobs hid the rightmost dot.

    2026-04-20 live session missed both a dark_red (21:06:49) and a dark_green
    (20:06:03) because find_rightmost_dot returned the centroid of a multi-dot
    fused component. Skips cleanly if the sample PNG is not checked out locally
    (logs/fires/ is gitignored) or OpenCV is not installed.
    """
    from pathlib import Path

    # Skip (not error) on machines without OpenCV — a bare import would fail
    # the test instead of skipping it.
    cv2 = pytest.importorskip("cv2")
    from atm.vision import classify_pixel, crop_roi, find_rightmost_dot, pixel_rgb

    path = Path(screenshot)
    if not path.exists():
        pytest.skip(f"sample not available: {path}")
    frame = cv2.imread(str(path))
    assert frame is not None
    # Matches configs/2026-04-18-1220.toml dot_roi — the live config that missed
    # these alerts. ROI is already imported at module scope.
    roi = ROI(x=0, y=712, w=1796, h=35)
    crop = crop_roi(frame, roi)
    dot = find_rightmost_dot(crop, bg_rgb=(0, 0, 0), bg_tol=25.0)
    assert dot is not None, "rightmost dot must be found"
    rgb = pixel_rgb(crop, *dot)
    palette = {
        "turquoise": ((0, 153, 153), 60.0),
        "yellow": ((153, 153, 0), 60.0),
        "dark_green": ((0, 122, 0), 60.0),
        "dark_red": ((128, 0, 0), 60.0),
        "light_green": ((0, 171, 0), 60.0),
        "light_red": ((171, 0, 0), 60.0),
        "gray": ((128, 128, 128), 60.0),
    }
    match = classify_pixel(rgb, palette)
    assert match.name == expected, (
        f"{path.name}: expected {expected}, got {match.name} at {dot} RGB={rgb}"
    )
def test_fused_blob_samples_rightmost_dot() -> None:
    """Fused multi-colour stripe must classify the rightmost colour, not the
    centroid colour. Pre-fix the centroid fell on an interior gray segment
    on real screenshots (2026-04-20 dark_red/dark_green misses)."""
    dark_red_bgr = (0, 0, 100)  # BGR for dark_red RGB=(100,0,0)
    frame = _make_fused_stripe_frame(gray_segments=7, tail_bgr=dark_red_bgr)
    # _make_cfg already defines gray=(128,128,128) and dark_red=(100,0,0) at
    # tolerance 30.0, so the previous local ColorSpec re-import and palette
    # overrides were redundant duplicates and have been removed.
    cfg = _make_cfg()
    det = Detector(cfg, capture=lambda: frame)
    r = det.step(0.0)
    assert r.dot_found is True
    assert r.match is not None
    assert r.match.name == "dark_red", (
        f"expected dark_red (rightmost segment), got {r.match.name} at "
        f"{r.dot_pos_abs} RGB={r.rgb} — centroid regression"
    )