#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np
import cv2
import cv2.cv as cv

from video import create_capture
from common import clock, draw_str

help_message = '''
USAGE: facedetect.py [--cascade <cascade_fn>] [--nested-cascade <cascade_fn>] [<video_source>]
'''

def detect(img, cascade):
    rects = cascade.detectMultiScale(img, scaleFactor=1.3, minNeighbors=4,
                                     minSize=(30, 30), flags=cv.CV_HAAR_SCALE_IMAGE)
    if len(rects) == 0:
        return []
    # Convert (x, y, w, h) boxes to (x1, y1, x2, y2) corner coordinates
    rects[:, 2:] += rects[:, :2]
    return rects

def draw_rects(img, rects, color):
    for x1, y1, x2, y2 in rects:
        cv2.rectangle(img, (x1, y1), (x2, y2), color, 2)

if __name__ == '__main__':
    import sys, getopt
    import paho.mqtt.client as mqtt

    print help_message

    args, video_src = getopt.getopt(sys.argv[1:], '', ['cascade=', 'nested-cascade='])
    try:
        video_src = video_src[0]
    except IndexError:
        video_src = 0
    args = dict(args)
    cascade_fn = args.get('--cascade', "../../data/haarcascades/haarcascade_frontalface_alt.xml")
    nested_fn = args.get('--nested-cascade', "../../data/haarcascades/haarcascade_eye.xml")

    cascade = cv2.CascadeClassifier(cascade_fn)
    nested = cv2.CascadeClassifier(nested_fn)

    cam = create_capture(video_src, fallback='synth:bg=../cpp/lena.jpg:noise=0.05')

    # Connect to the MQTT broker once, instead of reconnecting on every detection
    client = mqtt.Client()
    client.connect('127.0.0.1', 1883, keepalive=60)

    while True:
        ret, img = cam.read()
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        gray = cv2.equalizeHist(gray)

        t = clock()
        rects = detect(gray, cascade)
        vis = img.copy()
        draw_rects(vis, rects, (0, 255, 0))
        for x1, y1, x2, y2 in rects:
            roi = gray[y1:y2, x1:x2]
            vis_roi = vis[y1:y2, x1:x2]
            subrects = detect(roi.copy(), nested)
            draw_rects(vis_roi, subrects, (255, 0, 0))

            # Publish a message to MQTT when a person is detected
            client.publish('topic/aitc', 'Person detected!')
        dt = clock() - t

        draw_str(vis, (20, 20), 'time: %.1f ms' % (dt * 1000))
        cv2.imshow('facedetect', vis)

        # Quit on ESC
        if 0xFF & cv2.waitKey(5) == 27:
            break
    cv2.destroyAllWindows()
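
To check that the detection messages actually reach the broker, a separate subscriber can listen on the same topic. The following is a minimal sketch of such a companion script, not part of the original sample: it assumes a local broker on 127.0.0.1:1883 and the paho-mqtt 1.x callback API, and the file name subscriber_check.py is hypothetical.

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# subscriber_check.py -- minimal sketch for verifying the publisher above.
# Assumes a local MQTT broker on 127.0.0.1:1883 and the paho-mqtt 1.x API.

import paho.mqtt.client as mqtt

def on_connect(client, userdata, flags, rc):
    # Subscribe on connect (and again after any reconnect) to the topic used by facedetect.py
    client.subscribe('topic/aitc')

def on_message(client, userdata, msg):
    # Print each detection notification as it arrives
    print('%s: %s' % (msg.topic, msg.payload))

client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect('127.0.0.1', 1883, keepalive=60)
client.loop_forever()

Running this in a second terminal while facedetect.py is capturing should print one line per detected face, which is a quick way to confirm the broker connection before wiring anything else to the topic.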