Compare commits

...

1 Commits

Author SHA1 Message Date
Adriano 7e076deb80 feat(web): match overlay con edge filtrati + UCS + rimozione bbox ROI
_draw_matches ora coerente con anteprima modello:

- Edge filtrati con stessa pipeline matcher (hysteresis weak/strong_grad)
  e selezione feature: l'overlay del match riflette esattamente quello
  che l'utente ha visto nel preview "Anteprima edge"
- Background tinta scura su pixel hysteresis (40% colore match)
- Feature scelte come dot colorati per bin (palette 16 bin)
- UCS rosso/verde sul centro pose: asse X destra, Y giu' (image y-down),
  ruotato secondo angle del match
- Origine UCS: cerchio bianco con bordo nero per visibilita'

Rimossi (richiesta utente "togli la ROI"):
- bbox poly perimetrale: ridondante, copriva il pezzo
- linea marker primo lato: sostituita da UCS rosso

Compatibilita': se matcher non passato (es. uso esterno), fallback
Canny legacy. Tutti e 3 gli endpoint match (/match, /match_simple,
/match_recipe) ora propagano il matcher a _draw_matches.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-05 10:55:54 +02:00
+79 -23
View File
@@ -131,44 +131,100 @@ def _encode_png(img: np.ndarray) -> bytes:
def _draw_matches(scene: np.ndarray, matches: list[Match], def _draw_matches(scene: np.ndarray, matches: list[Match],
template_gray: np.ndarray | None) -> np.ndarray: template_gray: np.ndarray | None,
matcher: "LineShapeMatcher | None" = None) -> np.ndarray:
"""Disegna match annotati sulla scena.
Se matcher e' passato, usa la stessa pipeline di edge filtering
(hysteresis weak/strong_grad) e selezione feature usata in training,
cosi' l'overlay nel match riflette ESATTAMENTE quello che l'utente
ha visto nel preview "Anteprima edge". Inoltre disegna UCS
(asse X rosso, Y verde) sul centro pose del match.
Senza matcher: fallback Canny (legacy).
"""
out = scene.copy() out = scene.copy()
H, W = scene.shape[:2] H, W = scene.shape[:2]
palette = [ palette = [
(0, 255, 0), (0, 200, 255), (255, 100, 100), (255, 200, 0), (0, 255, 0), (0, 200, 255), (255, 100, 100), (255, 200, 0),
(200, 0, 255), (100, 255, 200), (255, 0, 0), (0, 255, 255), (200, 0, 255), (100, 255, 200), (255, 0, 0), (0, 255, 255),
] ]
bin_colors = [
(255, 0, 0), (255, 128, 0), (255, 255, 0), (0, 255, 0),
(0, 255, 255), (0, 128, 255), (0, 0, 255), (255, 0, 255),
(255, 100, 100), (255, 180, 100), (255, 230, 100), (180, 255, 100),
(100, 255, 200), (100, 180, 255), (180, 100, 255), (255, 100, 200),
]
for i, m in enumerate(matches): for i, m in enumerate(matches):
color = palette[i % len(palette)] color = palette[i % len(palette)]
if template_gray is not None: if template_gray is not None:
t = template_gray t = template_gray
th, tw = t.shape th, tw = t.shape
edge = cv2.Canny(t, 50, 150)
cx_t = (tw - 1) / 2.0; cy_t = (th - 1) / 2.0 cx_t = (tw - 1) / 2.0; cy_t = (th - 1) / 2.0
M = cv2.getRotationMatrix2D((cx_t, cy_t), m.angle_deg, m.scale) M = cv2.getRotationMatrix2D((cx_t, cy_t), m.angle_deg, m.scale)
M[0, 2] += m.cx - cx_t M[0, 2] += m.cx - cx_t
M[1, 2] += m.cy - cy_t M[1, 2] += m.cy - cy_t
warped = cv2.warpAffine(edge, M, (W, H), if matcher is not None:
flags=cv2.INTER_NEAREST, borderValue=0) # Edge filtrati con stessi param matcher (hysteresis)
mask = warped > 0 warped_gray = cv2.warpAffine(
if mask.any(): t, M, (W, H), flags=cv2.INTER_LINEAR, borderValue=0)
overlay = np.zeros_like(out) mag, bins = matcher._gradient(warped_gray)
overlay[mask] = color if matcher.weak_grad < matcher.strong_grad:
out[mask] = (0.3 * out[mask] + 0.7 * overlay[mask]).astype(np.uint8) edge_mask = matcher._hysteresis_mask(mag)
poly = m.bbox_poly.astype(np.int32).reshape(-1, 1, 2) else:
cv2.polylines(out, [poly], True, color, 2, cv2.LINE_AA) edge_mask = mag >= matcher.strong_grad
p0 = tuple(m.bbox_poly[0].astype(int)) # Background edge filtrati: tinta scura colore match
p1 = tuple(m.bbox_poly[1].astype(int)) if edge_mask.any():
cv2.line(out, p0, p1, color, 4, cv2.LINE_AA) bg_overlay = np.zeros_like(out)
dark = tuple(int(c * 0.35) for c in color)
bg_overlay[edge_mask] = dark
out = cv2.addWeighted(out, 1.0, bg_overlay, 0.7, 0)
# Feature scelte: estrazione alla pose, dot colorati per bin
fx, fy, fb = matcher._extract_features(mag, bins, None)
for k in range(len(fx)):
px, py = int(fx[k]), int(fy[k])
if 0 <= px < W and 0 <= py < H:
bcol = bin_colors[int(fb[k]) % len(bin_colors)]
cv2.circle(out, (px, py), 2, bcol, -1, cv2.LINE_AA)
else:
# Legacy Canny
edge = cv2.Canny(t, 50, 150)
warped = cv2.warpAffine(edge, M, (W, H),
flags=cv2.INTER_NEAREST, borderValue=0)
mask = warped > 0
if mask.any():
overlay = np.zeros_like(out)
overlay[mask] = color
out[mask] = (0.3 * out[mask] + 0.7 * overlay[mask]).astype(np.uint8)
# bbox poly e linea-marker rimossi (richiesta utente "togli la ROI"):
# UCS + edge filtrati gia' identificano pose e orientamento,
# il rettangolo aggiunto era ridondante e copriva il pezzo.
cx, cy = int(round(m.cx)), int(round(m.cy)) cx, cy = int(round(m.cx)), int(round(m.cy))
cv2.drawMarker(out, (cx, cy), color, cv2.MARKER_CROSS, 22, 2, cv2.LINE_AA) # UCS sul centro pose match (richiesta utente: come nell'anteprima
# modello). Asse X rosso destra, Y verde basso (image y-down).
# Lunghezza derivata dalla diagonale bbox per scala-invariante.
L = int(np.linalg.norm(m.bbox_poly[1] - m.bbox_poly[0])) // 2 L = int(np.linalg.norm(m.bbox_poly[1] - m.bbox_poly[0])) // 2
a = np.deg2rad(m.angle_deg) if L < 10:
cv2.arrowedLine(out, (cx, cy), L = 30 # fallback se bbox degenere
(int(cx + L * np.cos(a)), int(cy - L * np.sin(a))), ax = np.deg2rad(m.angle_deg)
color, 2, cv2.LINE_AA, tipLength=0.2) # X axis ruotato (rosso)
x_end = (int(cx + L * np.cos(ax)), int(cy - L * np.sin(ax)))
cv2.arrowedLine(out, (cx, cy), x_end,
(0, 0, 255), 2, cv2.LINE_AA, tipLength=0.2)
cv2.putText(out, "X", (x_end[0] + 4, x_end[1] + 5),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1, cv2.LINE_AA)
# Y axis perpendicolare (verde, +90° in image coords = giu' visivo)
y_end = (int(cx + L * np.cos(ax + np.pi / 2)),
int(cy - L * np.sin(ax + np.pi / 2)))
cv2.arrowedLine(out, (cx, cy), y_end,
(0, 255, 0), 2, cv2.LINE_AA, tipLength=0.2)
cv2.putText(out, "Y", (y_end[0] + 4, y_end[1] + 12),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1, cv2.LINE_AA)
# Origine UCS: cerchio bianco con bordo nero
cv2.circle(out, (cx, cy), 4, (0, 0, 0), -1, cv2.LINE_AA)
cv2.circle(out, (cx, cy), 3, (255, 255, 255), -1, cv2.LINE_AA)
label = f"#{i+1} {m.angle_deg:.0f}d s={m.scale:.2f} {m.score:.2f}" label = f"#{i+1} {m.angle_deg:.0f}d s={m.scale:.2f} {m.score:.2f}"
cv2.putText(out, label, (cx + 8, cy - 8), cv2.putText(out, label, (cx + 12, cy - 12),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2, cv2.LINE_AA) cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2, cv2.LINE_AA)
return out return out
@@ -511,7 +567,7 @@ def match(p: MatchParams):
# Render annotated image # Render annotated image
tg = cv2.cvtColor(roi_img, cv2.COLOR_BGR2GRAY) tg = cv2.cvtColor(roi_img, cv2.COLOR_BGR2GRAY)
annotated = _draw_matches(scene, matches, tg) annotated = _draw_matches(scene, matches, tg, matcher=m)
ann_id = _store_image(annotated) ann_id = _store_image(annotated)
return MatchResp( return MatchResp(
@@ -588,7 +644,7 @@ def match_simple(p: SimpleMatchParams):
t_find = time.time() - t0 t_find = time.time() - t0
tg = cv2.cvtColor(roi_img, cv2.COLOR_BGR2GRAY) tg = cv2.cvtColor(roi_img, cv2.COLOR_BGR2GRAY)
annotated = _draw_matches(scene, matches, tg) annotated = _draw_matches(scene, matches, tg, matcher=m)
ann_id = _store_image(annotated) ann_id = _store_image(annotated)
return MatchResp( return MatchResp(
@@ -864,7 +920,7 @@ def match_recipe(p: RecipeMatchParams):
) )
t_find = time.time() - t0 t_find = time.time() - t0
tg = m.template_gray if m.template_gray is not None else np.zeros((1, 1), np.uint8) tg = m.template_gray if m.template_gray is not None else np.zeros((1, 1), np.uint8)
annotated = _draw_matches(scene, matches, tg) annotated = _draw_matches(scene, matches, tg, matcher=m)
ann_id = _store_image(annotated) ann_id = _store_image(annotated)
return MatchResp( return MatchResp(
matches=[MatchResult( matches=[MatchResult(