- Interactive mission selector with metadata-driven design
- 5 educational missions (basics + advanced)
- AI assistant roles (Deepbit, Bugsy, Schnippsi, Tobi)
- SnakeCam gesture recognition system
- Token tracking utilities
- CLAUDE.md documentation
- .gitignore for logs and secrets
78 lines · 2.7 KiB · Python · Executable File
# gestures_v3.py
import cv2
import numpy as np
from datetime import datetime

# Path to the temporary snapshot used for diagnostics
DEBUG_SNAPSHOT_PATH = "/tmp/roi_snapshot.jpg"


def detect_hand_gesture(frame):
    """
    Detect simple hand gestures such as 'wave' (open hand) and 'fist' (closed fist)
    by analysing the contours in the bottom-right region of the frame.
    Detection is based on convex-hull analysis.

    Args:
        frame (ndarray): the current camera frame

    Returns:
        (str, ndarray): (gesture, region-of-interest crop)
    """
    height, width, _ = frame.shape

    # ROI: bottom-right corner of the frame (lower-right 40% in each dimension)
    roi = frame[int(height * 0.6):height, int(width * 0.6):width]

    # Save a snapshot for debugging purposes
    cv2.imwrite(DEBUG_SNAPSHOT_PATH, roi)

    # Convert to HSV
    hsv = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)

    # Skin-colour mask (tuned for varying lighting)
    lower_skin = np.array([0, 30, 60], dtype=np.uint8)
    upper_skin = np.array([20, 150, 255], dtype=np.uint8)
    mask = cv2.inRange(hsv, lower_skin, upper_skin)

    # Smooth the mask and find contours
    mask = cv2.GaussianBlur(mask, (7, 7), 0)
    contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)

    if contours:
        # Largest contour (the palm)
        contour = max(contours, key=cv2.contourArea)

        # Guard against noise: area too small
        if cv2.contourArea(contour) < 1000:
            return ("none", roi)

        # Convex hull and convexity defects
        hull = cv2.convexHull(contour, returnPoints=False)
        if hull is not None and len(hull) > 3:
            defects = cv2.convexityDefects(contour, hull)

            if defects is not None:
                finger_count = 0
                for i in range(defects.shape[0]):
                    s, e, f, d = defects[i, 0]
                    start = tuple(contour[s][0])
                    end = tuple(contour[e][0])
                    far = tuple(contour[f][0])

                    # Angle at the defect point via the law of cosines:
                    # the more sharp-angled defects, the more fingers are extended
                    a = np.linalg.norm(np.array(start) - np.array(end))
                    b = np.linalg.norm(np.array(start) - np.array(far))
                    c = np.linalg.norm(np.array(end) - np.array(far))
                    angle = np.arccos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c + 1e-5))

                    if angle <= np.pi / 2:  # at most 90 degrees
                        finger_count += 1

                # Classify based on the number of extended fingers
                if finger_count >= 3:
                    return ("wave", roi)
                elif finger_count == 0:
                    return ("fist", roi)

    return ("none", roi)
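A minimal sketch of how detect_hand_gesture could be driven from a live webcam feed, assuming a standard OpenCV capture loop; the camera index, window name, and 'q' exit key are illustrative assumptions and not taken from the repository:

# usage_sketch.py (hypothetical example, not part of gestures_v3.py)
import cv2
from gestures_v3 import detect_hand_gesture

cap = cv2.VideoCapture(0)  # assumed default webcam index
try:
    while True:
        ok, frame = cap.read()
        if not ok:
            break
        gesture, roi = detect_hand_gesture(frame)
        # Overlay the detected gesture label for a quick visual check
        cv2.putText(frame, gesture, (10, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 255, 0), 2)
        cv2.imshow("SnakeCam", frame)
        if cv2.waitKey(1) & 0xFF == ord("q"):
            break
finally:
    cap.release()
    cv2.destroyAllWindows()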