Python | A Gesture Recognition System Based on the Mediapipe Framework

I. Project Requirements

        1. Topic

         This topic addresses non-contact human-computer interaction for a consultation and presentation system. The core problem is to use computer vision to detect and recognize hand gestures in a live video stream in real time. The camera captures the operator's continuous gestures, and the system recognizes them to provide five basic interactions: click, pan, zoom, grab, and rotate. Beyond these, the five basic gestures can be extended to meet the specific business needs of different customers.

        Contestants may use traditional computer vision methods or machine learning / deep learning approaches to detect and recognize the user's gestures in the continuous video captured by the camera and to output the corresponding control signals, thereby driving the interaction of the presentation system. The algorithm or model should be developed around the business requirements, recognize the user's control gestures under real-world conditions, and achieve real-time interaction.

        2. Technical Requirements and Metrics

        The system must detect and recognize control gestures in the video stream captured by the camera and use them to control the presentation system in real time. The metrics are:

        (1) The detection and recognition accuracy for each gesture must be at least 80%.

        (2) The detection and recognition time for each gesture (from the completion of the gesture to the output of the result) must not exceed 500 ms.

        The development environment and language are not restricted (for example, Python + OpenCV; deep learning frameworks such as PyTorch or TensorFlow may be used). Open-source code may be used during development, but it must be documented in detail and its license must permit commercial use; commercial modules are not allowed.

II. Runtime Environment

        The system runs on Windows PCs. Python 3.9 or later is required, together with the OpenCV, Mediapipe, PyQt5, and QtAwesome libraries.

Install OpenCV —— installing OpenCV for Python 3.9

Install Mediapipe —— installing Mediapipe for Python

Install PyQt5 —— installing PyQt5 and its tools (Qt Designer, PyUIC, PyRcc) in PyCharm

Install QtAwesome —— installing QtAwesome for Python
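
For reference, a quick way to verify the environment is the minimal import check below (it assumes the libraries were installed from the standard PyPI packages, e.g. via pip install opencv-python mediapipe PyQt5 QtAwesome):

# Minimal environment check: all four libraries must import without errors
import cv2
import mediapipe
import PyQt5
import qtawesome

print("OpenCV version:", cv2.__version__)
print("All required libraries imported successfully")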

III. Results

        1. Basic gestures

                Five basic gestures: click, grab, pan, zoom, and rotate.

        2. Extended gestures

                The numbers one, two, three, four, five, and six, plus the "I love you" sign.

IV. Code

        1. Project structure

[Figure 1: project structure screenshot]

        2. Project code

                (1) Layout code (window_layout.py)

# -*- coding: utf-8 -*-
import sys

import qtawesome
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QIcon


class WindowLayout(object):
    def __init__(self):
        # Container
        self.central_widget = None

        # Widgets
        self.close_button = None
        self.other_button = None
        self.minimize_button = None

        self.start_button = None
        self.camera_button = None

        self.camera_label = None
        self.result_label = None
        self.running_label = None

    def setupUi(self, MainWindow):
        # Main window
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1250, 730)

        # ------------- Layout ------------- #
        self.central_widget = QtWidgets.QWidget(MainWindow)
        self.central_widget.setObjectName("central_widget")
        MainWindow.setCentralWidget(self.central_widget)  # place the container on the main window

        # Close button
        self.close_button = QtWidgets.QPushButton(self.central_widget)
        self.close_button.setGeometry(QtCore.QRect(60, 60, 30, 30))
        self.close_button.setObjectName("close_button")
        # Placeholder button
        self.other_button = QtWidgets.QPushButton(self.central_widget)
        self.other_button.setGeometry(QtCore.QRect(120, 60, 30, 30))
        self.other_button.setObjectName("other_button")
        # Minimize button
        self.minimize_button = QtWidgets.QPushButton(self.central_widget)
        self.minimize_button.setGeometry(QtCore.QRect(180, 60, 30, 30))
        self.minimize_button.setObjectName("minimize_button")

        # "Open camera" button
        self.camera_button = QtWidgets.QPushButton(self.central_widget)
        self.camera_button.setIcon(qtawesome.icon('fa5s.video', color='white'))
        self.camera_button.setText(" 打开相机")
        self.camera_button.setGeometry(QtCore.QRect(60, 130, 150, 40))
        self.camera_button.setObjectName("camera_button")
        # "Gesture detection" button
        self.start_button = QtWidgets.QPushButton(self.central_widget)
        self.start_button.setIcon(qtawesome.icon('fa5s.eye', color='white'))
        self.start_button.setText(" 手势检测")
        self.start_button.setGeometry(QtCore.QRect(60, 190, 150, 40))
        self.start_button.setObjectName("start_button")

        # Recognition result display
        self.result_label = QtWidgets.QLabel(self.central_widget)
        self.result_label.setText("结果")
        self.result_label.setGeometry(QtCore.QRect(50, 490, 170, 170))
        self.result_label.setObjectName("result_label")

        # Camera feed display
        self.camera_label = QtWidgets.QLabel(self.central_widget)
        self.camera_label.setText("手势识别")
        self.camera_label.setGeometry(QtCore.QRect(300, 60, 900, 600))
        self.camera_label.setObjectName("camera_label")

        # Program status label
        self.running_label = QtWidgets.QLabel(self.central_widget)
        self.running_label.setText("程序运行状态")
        self.running_label.setGeometry(QtCore.QRect(310, 665, 900, 40))
        self.running_label.setObjectName("running_label")

        # ------------- Styling ------------- #
        self.central_widget.setStyleSheet('''
            QWidget#central_widget{
                border-radius:7px;
                border-image:url(background.png);}
            ''')

        self.close_button.setStyleSheet('''
            QPushButton{
                background-color: rgba(247, 102, 119, 0.8);
                border-radius:8px;}
            QPushButton:hover{
                background-color: rgba(255, 0, 0, 0.7);}''')
        self.other_button.setStyleSheet('''
            QPushButton{
                background-color: rgba(250, 210, 116, 0.8);
                border-radius:8px;}
            QPushButton:hover{
                background-color: rgba(255, 255, 0, 0.8);}''')
        self.minimize_button.setStyleSheet('''
            QPushButton{
                background-color: rgba(50, 200, 50, 0.8);
                border-radius:8px;}
            QPushButton:hover{
                background-color: rgba(0, 250, 0, 0.8);}''')

        self.camera_button.setStyleSheet('''        
            QPushButton{
                border:none;
                color:white;
                font-size:15px;
                font-weight:bold;
                border-radius:8px;
                font-family:Roman times;
                background-color: rgba(200, 200, 200, 0.5);}
            QPushButton:hover{
                background-color: rgba(200, 200, 200, 0.6);}
        ''')

        self.start_button.setStyleSheet('''
            QPushButton{
                border:none;
                color:white;
                font-size:15px;
                font-weight:bold;
                border-radius:8px;
                font-family:Roman times;
                background-color: rgba(200, 200, 200, 0.4);}
            QPushButton:hover{
                background-color: rgba(200, 200, 200, 0.6);}
        ''')

        self.camera_label.setAlignment(Qt.AlignCenter)
        self.camera_label.setStyleSheet('''
                color:white;
                font-size:45px;
                font-weight:bold;
                font-family:Roman times;
                background-color: rgba(255, 255, 255, 0.3)
            ''')

        self.result_label.setAlignment(Qt.AlignCenter)
        self.result_label.setStyleSheet('''
                border-radius:5px;
                color:white;
                font-size:35px;
                font-weight:bold;
                font-family:Roman times;
                background-color: rgba(255, 255, 255, 0.3)
            ''')

        self.running_label.setStyleSheet('''
                color:white;
                font-size:16px;
                font-weight:bold;
                font-family:Roman times;
            ''')

        # Overall window settings
        MainWindow.setWindowOpacity(1)  # window opacity
        MainWindow.setAttribute(QtCore.Qt.WA_TranslucentBackground)  # translucent background so the rounded central widget is drawn without a rectangular frame
        MainWindow.setWindowFlag(QtCore.Qt.FramelessWindowHint)  # remove the system title bar; a frameless window cannot be moved or resized by the user
        MainWindow.setWindowIcon(QIcon('Logo.ico'))  # set the application icon

        QtCore.QMetaObject.connectSlotsByName(MainWindow)



if __name__ == '__main__':
    # Create the Qt application object that manages the event loop and the window-system interaction
    app = QtWidgets.QApplication(sys.argv)
    # Create a WindowLayout object (the class defined above); __init__ runs automatically
    windowLayout = WindowLayout()
    # Create the QMainWindow that will be passed to WindowLayout.setupUi()
    mainWindow = QtWidgets.QMainWindow()
    # Build the UI onto the main window
    windowLayout.setupUi(mainWindow)
    # Show the window
    mainWindow.show()
    # Enter the event loop and exit the process when it returns
    sys.exit(app.exec_())

                (2) Logic code

# -*- coding: utf-8 -*-
import math
import sys
from time import time

import cv2
import mediapipe as mp
from PyQt5.QtGui import QImage, QPixmap
from PyQt5.QtWidgets import QApplication, QMainWindow, QMessageBox
from PyQt5.QtWidgets import QDesktopWidget

from window_layout import WindowLayout


class WindowLogic(QMainWindow, WindowLayout):
    def __init__(self, parent=None):
        super().__init__(parent)
        self.setupUi(self)

        # ====== UI logic ====== #
        self.close_button.clicked.connect(self.close_window)            # close-window button
        self.minimize_button.clicked.connect(self.showMinimized)        # minimize-window button
        self.camera_button.clicked.connect(self.camera_judgement)       # open-camera button
        self.start_button.clicked.connect(self.recognition_judgement)   # gesture-recognition button

        # ====== Camera properties ====== #
        self.cap = cv2.VideoCapture()   # camera capture object
        self.source = 0                 # camera index
        self.WIN_WIDTH = 900    # width of the camera display area
        self.WIN_HEIGHT = 600   # height of the camera display area

        # ====== Gesture recognition properties ====== #
        self.sole_hand_name = ''            # gesture name of a single hand
        self.sole_hand_landmarks = []       # landmark coordinates of a single hand
        self.current_frame_name = []        # gesture names in the current frame
        self.current_frame_landmarks = []   # landmark coordinates in the current frame
        self.current_frame_msg = []         # info for the current frame
        self.stream_frame_msg = []          # recent frame history of the video stream
        self.INF = 65535.                   # sentinel value for an invalid angle
        self.normal_unbend_angle = 49.      # below this angle a finger counts as straight
        self.normal_threshold_angle = 65.   # above this angle a finger counts as bent
        self.thumb_threshold_angle = 53.    # bend threshold for the thumb
        self.isGestureRecognition_flag = False  # whether gesture recognition is running
        self.resultRetainFrame = 0

        # ====== Gesture control properties ====== #
        self.desktop = QDesktopWidget()
        self.screen_rect = self.desktop.screenGeometry()
        self.screen_width = self.screen_rect.width()
        self.screen_height = self.screen_rect.height()
        self.x = None
        self.y = None

        # ====== Timing properties ====== #
        self.camera_start_time = None
        self.recognize_start_time = None

    # ================================== Open Camera ================================== #
    def camera_judgement(self):
        """ 打开相机按钮 逻辑判断器 """
        # 打开摄像头
        if not self.cap.isOpened():
            self.running_label.setText(u"正在打开相机,请稍等...")
            QApplication.processEvents()
            self.camera_start_time = time()
            self.cap.open(self.source)
            self.running_label.setText("相机模块初始化时间 : {:.3f}".format(time() - self.camera_start_time) + 's')
            try:
                self.camera_button.setText(u' 关闭相机')
                self.show_camera()
            except Exception:
                QMessageBox.about(self, '警告', '相机不能正常被打开')
        # Close the camera and release the capture object
        else:
            if self.isGestureRecognition_flag:
                QMessageBox.about(self, '提示', '请先关闭手势识别模块')
            else:
                self.cap.release()
                self.camera_button.setText(u' 打开相机')
                self.start_button.setText(u' 手势检测')
                self.running_label.setText(u'已关闭相机模块')

    def show_camera(self):
        """ Display the live camera feed """
        while self.cap.isOpened():
            ret, frame = self.cap.read()
            QApplication.processEvents()
            if not ret:  # stop if the camera did not return a frame
                break
            show = cv2.resize(frame, (self.WIN_WIDTH, self.WIN_HEIGHT))
            show = cv2.cvtColor(show, cv2.COLOR_BGR2RGB)
            show_image = QImage(show.data, show.shape[1], show.shape[0], QImage.Format_RGB888)
            self.camera_label.setPixmap(QPixmap.fromImage(show_image))
        self.camera_label.setPixmap(QPixmap(""))

    # ================================== Gesture Recognition ================================== #
    def recognition_judgement(self):
        """ 手势识别按钮 逻辑判断器 """
        if not self.cap.isOpened():
            QMessageBox.about(self, '提示', '请先打开摄像头')
        else:
            if not self.isGestureRecognition_flag:
                self.isGestureRecognition_flag = True
                self.start_button.setText(u' 关闭检测')
                self.gesture_recognize()
                self.isGestureRecognition_flag = False
                self.start_button.setText(u' 手势检测')
            else:
                self.isGestureRecognition_flag = False
                self.start_button.setText(u' 手势检测')
                self.running_label.setText(u'已关闭检测模块')
                self.result_label.setText("结果")

    def gesture_recognize(self):
        """ 手势识别 """
        recognize_start_time = time()
        self.running_label.setText(u"正在打开手势识别模块,请稍等...")
        QApplication.processEvents()
        mp_drawing = mp.solutions.drawing_utils
        mp_hands = mp.solutions.hands
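        # mp.solutions.hands combines palm detection with a 21-landmark hand model;
        # static_image_mode=False lets it track hands across video frames instead of
        # re-running detection from scratch on every frame.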
        hands = mp_hands.Hands(static_image_mode=False, max_num_hands=2,
                               min_detection_confidence=0.75,
                               min_tracking_confidence=0.75)
        self.stream_frame_msg = []
        self.running_label.setText("检测模块初始化时间 : {:.3f}".format(time() - recognize_start_time) + 's')

        while self.isGestureRecognition_flag:
            self.recognize_start_time = time()
            QApplication.processEvents()
            ret, frame = self.cap.read()
            if not ret:  # stop if the camera did not return a frame
                break
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)  # Mediapipe expects RGB input
            result = hands.process(frame)
            frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)  # convert back to BGR for OpenCV drawing

            self.current_frame_name = []
            self.current_frame_landmarks = []


            if result.multi_handedness:
                hand_num = len(result.multi_handedness)

                # One hand detected
                if hand_num == 1:
                    if result.multi_hand_landmarks:

                        # this loop runs once
                        for hand_landmarks in result.multi_hand_landmarks:
                            mp_drawing.draw_landmarks(frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)

                            self.sole_hand_name = ''
                            self.sole_hand_landmarks = []
                            for i in range(21):
                                x = hand_landmarks.landmark[i].x * frame.shape[1]
                                y = hand_landmarks.landmark[i].y * frame.shape[0]
                                self.sole_hand_landmarks.append((x, y))

                            if self.sole_hand_landmarks:
                                angle_list = self.get_hand_angle(self.sole_hand_landmarks)
                                self.sole_hand_name = self.recognize_static_gesture(angle_list)

                        self.current_frame_landmarks.append(self.sole_hand_landmarks)
                        self.current_frame_landmarks.append([])

                        self.current_frame_name.append(self.sole_hand_name)
                        self.current_frame_name.append('null')
                # Two hands detected
                elif hand_num == 2:
                    if result.multi_hand_landmarks:
                        # this loop runs twice, once per hand
                        for hand_landmarks in result.multi_hand_landmarks:
                            mp_drawing.draw_landmarks(frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)

                            self.sole_hand_name = ''
                            self.sole_hand_landmarks = []

                            for i in range(21):
                                x = hand_landmarks.landmark[i].x * frame.shape[1]
                                y = hand_landmarks.landmark[i].y * frame.shape[0]
                                self.sole_hand_landmarks.append((x, y))

                            if self.sole_hand_landmarks:
                                angle_list = self.get_hand_angle(self.sole_hand_landmarks)
                                self.sole_hand_name = self.recognize_static_gesture(angle_list)

                            self.current_frame_landmarks.append(self.sole_hand_landmarks)
                            self.current_frame_name.append(self.sole_hand_name)

                # Save the landmarks and names of the current frame into current_frame_msg
                self.current_frame_msg = [self.current_frame_landmarks, self.current_frame_name]
                # Append the current frame's info to the stream history
                self.stream_frame_msg.append(self.current_frame_msg)

                self.recognize_dynamic_gesture()

                # Bound memory: once the history exceeds 80 frames, keep only the most recent 40
                if len(self.stream_frame_msg) > 80:
                    self.stream_frame_msg = self.stream_frame_msg[-40:]

            show_video = cv2.cvtColor(cv2.resize(frame, (self.WIN_WIDTH, self.WIN_HEIGHT)), cv2.COLOR_BGR2RGB)
            show_image = QImage(show_video.data, show_video.shape[1], show_video.shape[0], QImage.Format_RGB888)
            self.camera_label.setPixmap(QPixmap.fromImage(show_image))

        self.show_camera()

    def recognize_dynamic_gesture(self):
        """ 识别动态手势 """
        if len(self.stream_frame_msg) > 15:
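            # Index convention: stream_frame_msg[-1] is the latest frame,
            # [-5] and [-10] are 5 and 10 frames earlier. A pose gesture fires when it
            # is present in the latest frame but absent a few frames before; motion
            # gestures (pan/zoom/rotate) also require the wrist key point (landmark 0)
            # to have moved far enough between those frames.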
            # Grab
            if self.one_hand_judge_fist(-1) and not self.one_hand_judge_fist(-5):
                self.show_label('抓取/零')
            # Click
            elif self.one_hand_judge_point(-1) and not self.one_hand_judge_point(-5):
                self.show_label('点击/一')
            # Pan
            elif self.one_hand_judge_spread(-1) and self.one_hand_judge_spread(-5) and self.one_hand_judge_spread(-10):
                if math.fabs(self.count_one_hand_delta_x(-1, -10)) >= 180 or \
                        math.fabs(self.count_one_hand_delta_x(-1, -5)) >= 90:
                    self.show_label('平移')
            elif self.two_hand_judge_spread(-1) and self.two_hand_judge_spread(-5) and self.two_hand_judge_spread(-10):
                # Zoom
                if (self.count_two_hand_delta_x(-1,-10,0) <= -50 and self.count_two_hand_delta_x(-1,-10,1) >= 50
                    and math.fabs(self.count_two_hand_delta_y(-1, -10, 0)) <= 20 and math.fabs(self.count_two_hand_delta_y(-1, -10, 1)) <= 20) or \
                        (self.count_two_hand_delta_x(-1,-10,0) >= 50 and self.count_two_hand_delta_x(-1,-10,1) <= -50
                         and math.fabs(self.count_two_hand_delta_y(-1, -10, 0)) <= 20 and math.fabs(self.count_two_hand_delta_y(-1, -10, 1)) <= 20):
                    self.show_label('缩放')
                # Rotate
                elif math.fabs(self.count_two_hand_delta_y(-1, -10, 0)) >= 50 \
                        and math.fabs(self.count_two_hand_delta_x(-1, -10, 0)) >= 50 \
                        and math.fabs(self.count_two_hand_delta_y(-1, -10, 1)) >= 50 \
                        and math.fabs(self.count_two_hand_delta_x(-1, -10, 1)) >= 50:
                    self.show_label('旋转')
            # "I love you"
            elif self.two_hand_judge_thumbUp(-1) and self.two_hand_judge_thumbUp(-5) and self.two_hand_judge_thumbUp(-10):
                self.show_label('我爱你')
            # Number two
            elif self.one_hand_judge_two(-1) and not self.one_hand_judge_two(-5):
                self.show_label("二")
            # Number three
            elif self.one_hand_judge_three(-1) and not self.one_hand_judge_three(-5):
                self.show_label("三")
            # Number four
            elif self.one_hand_judge_four(-1) and not self.one_hand_judge_four(-5):
                self.show_label("四")
            # Number five
            elif self.one_hand_judge_spread(-1) and not self.one_hand_judge_spread(-5):
                self.show_label("五")
            # Number six
            elif self.one_hand_judge_six(-1) and not self.one_hand_judge_six(-5):
                self.show_label("六")

    def recognize_static_gesture(self, angle_list):
        """
        识别静态手势
        :param angle_list: 手势弯曲角度列表
        :return: 静态手势类型
        """
        gesture_type = None
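        # angle_list[0..4] correspond to thumb, index, middle, ring and pinky;
        # a small angle means the finger is straight, a large angle means it is bent.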
        if self.INF not in angle_list:
            if (angle_list[0] > self.thumb_threshold_angle) and (angle_list[1] > self.normal_threshold_angle) \
                    and (angle_list[2] > self.normal_threshold_angle) and (angle_list[3] > self.normal_threshold_angle) \
                    and (angle_list[4] > self.normal_threshold_angle):
                gesture_type = "fist"

            elif (angle_list[0] < self.normal_unbend_angle) and (angle_list[1] < self.normal_unbend_angle) \
                    and (angle_list[2] < self.normal_unbend_angle) and (angle_list[3] < self.normal_unbend_angle) and \
                    (angle_list[4] < self.normal_unbend_angle):
                gesture_type = "spread"

            elif (angle_list[0] > 5) and (angle_list[1] < self.normal_unbend_angle) \
                    and (angle_list[2] > self.normal_threshold_angle) and (angle_list[3] > self.normal_threshold_angle) \
                    and (angle_list[4] > self.normal_threshold_angle):
                gesture_type = "point"

            elif (angle_list[0] > self.thumb_threshold_angle) and (angle_list[1] < self.normal_unbend_angle) and (
                    angle_list[2] < self.normal_unbend_angle) and (angle_list[3] > self.normal_threshold_angle) and (
                    angle_list[4] > self.normal_threshold_angle):
                gesture_type = "two"

            elif (angle_list[0] > self.thumb_threshold_angle) and (angle_list[1] < self.normal_unbend_angle) and (
                    angle_list[2] < self.normal_unbend_angle) and (angle_list[3] < self.normal_unbend_angle) and (
                    angle_list[4] > self.normal_threshold_angle):
                gesture_type = "three"

            elif (angle_list[0] > self.thumb_threshold_angle) and (angle_list[1] < self.normal_unbend_angle) and (
                    angle_list[2] < self.normal_unbend_angle) and (angle_list[3] < self.normal_unbend_angle) and (
                    angle_list[4] < self.normal_threshold_angle):
                gesture_type = "four"

            elif (angle_list[0] < self.normal_unbend_angle) and (angle_list[1] > self.normal_threshold_angle) and (
                    angle_list[2] > self.normal_threshold_angle) and (angle_list[3] > self.normal_threshold_angle) and (
                    angle_list[4] < self.normal_unbend_angle):
                gesture_type = "six"
            elif (angle_list[0] < self.normal_unbend_angle) and (angle_list[1] > self.normal_threshold_angle) and (
                    angle_list[2] > self.normal_threshold_angle) and (angle_list[3] > self.normal_threshold_angle) and (
                    angle_list[4] > self.normal_threshold_angle):
                gesture_type = "thumbUp"

        return gesture_type

    def get_hand_angle(self, coordinate):
        """
        获取当前手势弯曲角度
        :param coordinate: 手势弯曲角度坐标
        :return: 手势弯曲角度列表
        """
        angle_list = []
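        # Mediapipe hand landmark indices: 0 = wrist, 4/8/12/16/20 = fingertips,
        # with the joints of each finger in between. Each angle below is measured
        # between a palm-direction vector and the fingertip segment of one finger.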
        # Thumb angle
        angle = self.compute_hand_angle(
            ((int(coordinate[0][0]) - int(coordinate[2][0])),(int(coordinate[0][1]) - int(coordinate[2][1]))),
            ((int(coordinate[3][0]) - int(coordinate[4][0])),(int(coordinate[3][1]) - int(coordinate[4][1]))))
        angle_list.append(angle)

        # Index finger angle
        angle = self.compute_hand_angle(
            ((int(coordinate[0][0]) - int(coordinate[6][0])),(int(coordinate[0][1]) - int(coordinate[6][1]))),
            ((int(coordinate[7][0]) - int(coordinate[8][0])),(int(coordinate[7][1]) - int(coordinate[8][1]))))
        angle_list.append(angle)

        # Middle finger angle
        angle = self.compute_hand_angle(
            ((int(coordinate[0][0]) - int(coordinate[10][0])),(int(coordinate[0][1]) - int(coordinate[10][1]))),
            ((int(coordinate[11][0]) - int(coordinate[12][0])),(int(coordinate[11][1]) - int(coordinate[12][1]))))
        angle_list.append(angle)

        # Ring finger angle
        angle = self.compute_hand_angle(
            ((int(coordinate[0][0]) - int(coordinate[14][0])),(int(coordinate[0][1]) - int(coordinate[14][1]))),
            ((int(coordinate[15][0]) - int(coordinate[16][0])),(int(coordinate[15][1]) - int(coordinate[16][1]))))
        angle_list.append(angle)

        # Little finger angle
        angle = self.compute_hand_angle(
            ((int(coordinate[0][0]) - int(coordinate[18][0])),(int(coordinate[0][1]) - int(coordinate[18][1]))),
            ((int(coordinate[19][0]) - int(coordinate[20][0])),(int(coordinate[19][1]) - int(coordinate[20][1]))))
        angle_list.append(angle)
        return angle_list

    def compute_hand_angle(self, A, B):
        """
        Compute the bend angle of one finger
        :param A: 2D vector A
        :param B: 2D vector B
        :return: angle between A and B in degrees, or INF if it cannot be computed
        """
        ax, ay = A[0], A[1]
        bx, by = B[0], B[1]
        try:
            # Angle from the dot-product formula: cos(theta) = (A . B) / (|A| * |B|)
            angle = math.degrees(math.acos((ax * bx + ay * by) / (((ax ** 2 + ay ** 2) ** 0.5) * ((bx ** 2 + by ** 2) ** 0.5))))
        except (ValueError, ZeroDivisionError):
            angle = self.INF

        if angle > 180.:
            angle = self.INF
        return angle

    def one_hand_judge_fist(self, i):
        return self.stream_frame_msg[i][1][0] == 'fist' and self.stream_frame_msg[i][1][1] == 'null'

    def one_hand_judge_spread(self, i):
        return self.stream_frame_msg[i][1][0] == 'spread' and self.stream_frame_msg[i][1][1] == 'null'

    def one_hand_judge_point(self, i):
        return self.stream_frame_msg[i][1][0] == 'point' and self.stream_frame_msg[i][1][1] == 'null'

    def two_hand_judge_fist(self, i):
        return self.stream_frame_msg[i][1][0] == 'fist' and self.stream_frame_msg[i][1][1] == 'fist'

    def two_hand_judge_spread(self, i):
        return self.stream_frame_msg[i][1][0] == 'spread' and self.stream_frame_msg[i][1][1] == 'spread'

    def two_hand_judge_thumbUp(self, i):
        return self.stream_frame_msg[i][1][0] == 'thumbUp' and self.stream_frame_msg[i][1][1] == 'thumbUp'

    def one_hand_judge_two(self, i):
        return self.stream_frame_msg[i][1][0] == 'two' and self.stream_frame_msg[i][1][1] == 'null'

    def one_hand_judge_three(self, i):
        return self.stream_frame_msg[i][1][0] == 'three' and self.stream_frame_msg[i][1][1] == 'null'

    def one_hand_judge_four(self, i):
        return self.stream_frame_msg[i][1][0] == 'four' and self.stream_frame_msg[i][1][1] == 'null'

    def one_hand_judge_six(self, i):
        return self.stream_frame_msg[i][1][0] == 'six' and self.stream_frame_msg[i][1][1] == 'null'

    def count_one_hand_delta_x(self, i, j):
        """
        组合计算 - 计算一只手横坐标偏移量
        :param i: 起始手势帧数
        :param j: 终止手势帧数
        :return: 手势关键点水平偏移量 delta_x
        """
        return self.stream_frame_msg[i][0][0][0][0] - self.stream_frame_msg[j][0][0][0][0]

    def count_two_hand_delta_x(self, i, j, hand):
        """
        组合计算 - 计算 两只手中指定手 横坐标偏移量
        :param i: 起始手势帧数
        :param j: 终止手势帧数
        :param hand: 指定手编号
        :return: 指定手势关键点水平偏移量 delta_x
        """
        return self.stream_frame_msg[i][0][hand][0][0] - self.stream_frame_msg[j][0][hand][0][0]

    def count_two_hand_delta_y(self, i, j, hand):
        """
        组合计算 - 计算 两只手中指定手 纵坐标偏移量
        :param i: 起始手势帧数
        :param j: 终止手势帧数
        :param hand: 指定手编号
        :return: 指定手势关键点垂直偏移量 delta_y
        """
        return self.stream_frame_msg[i][0][hand][0][1] - self.stream_frame_msg[j][0][hand][0][1]

    def show_label(self, text):
        """
        Display the recognition result
        :param text: text to show in the result area
        """
        self.result_label.setText(text)
        self.running_label.setText(u"手势检测中...")

    # ================================== Window Control ================================== #
    def close_window(self):
        """ 关闭窗口 """
        if self.cap.isOpened():
            self.cap.release()
        self.close()

if __name__ == "__main__":
    app = QApplication(sys.argv)
    win = WindowLogic()
    win.show()
    sys.exit(app.exec_())
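
To make the finger-bend measurement above concrete, here is a minimal standalone sketch of the same dot-product angle computation used in compute_hand_angle (the sample vectors are invented for illustration): vectors pointing roughly the same way give a small angle, below the normal_unbend_angle threshold of 49°, so the finger counts as straight, while a folded fingertip segment gives a large angle, above the normal_threshold_angle of 65°, so the finger counts as bent.

import math

def vector_angle(a, b):
    # Angle in degrees between two 2D vectors, via cos(theta) = (a . b) / (|a| * |b|)
    dot = a[0] * b[0] + a[1] * b[1]
    norm = math.hypot(a[0], a[1]) * math.hypot(b[0], b[1])
    return math.degrees(math.acos(dot / norm))

# Straight finger: the palm-direction vector and the fingertip segment point
# in nearly the same direction, so the angle is close to 0 (counts as "unbent").
print(vector_angle((0, -100), (0, -30)))    # 0.0

# Folded finger: the fingertip segment points back toward the palm,
# so the angle is large (counts as "bent").
print(vector_angle((0, -100), (10, 25)))    # about 158 degrees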
