Kinect&OpenNI开发(二) OpenCV实时显示手部位置数据

本文介绍了如何简化四五年前的Kinect与OpenNI开发教程中的复杂步骤,通过一种黑科技实现无需姿势检测和骨骼校正,直接获取并用OpenCV实时显示手部位置数据。

摘要生成于 C知道 ,由 DeepSeek-R1 满血版支持, 前往体验 >

       鉴于网上的教程都是四五年前的东西,还必须借用姿势检测触发骨骼校正才能进行骨骼数据的采集,非常的麻烦,所以我进行了代码的简化,并且根据百度文库中的一篇文章中的黑科技实现了不需要校正直接进行骨骼采集.

/*
 * SkeletonViewer.cpp
 */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>   // memset / memcpy (previously pulled in only transitively)

    #include <iostream>
    #include <string>
    #include <vector>

    #include <XnCppWrapper.h>         // OpenNI C++ wrapper
    #include <XnModuleCppInterface.h>

    #include "cv.h"
    #include "highgui.h"

    using namespace std;
    using namespace cv;

    // Generators (file-scope so the callbacks below can reach them)
    xn::UserGenerator userGenerator;//detects users entering/leaving the scene; registered callbacks fire on those events
    xn::DepthGenerator depthGenerator;//depth stream (used for real-world -> projective coordinate conversion)
    xn::ImageGenerator imageGenerator;//RGB colour stream

    /*
      OpenNI skeleton joint IDs (1-based; array index = ID - 1):
      XN_SKEL_HEAD         = 1,  XN_SKEL_NECK           = 2,
      XN_SKEL_TORSO        = 3,  XN_SKEL_WAIST          = 4,
      XN_SKEL_LEFT_COLLAR  = 5,  XN_SKEL_LEFT_SHOULDER  = 6,
      XN_SKEL_LEFT_ELBOW   = 7,  XN_SKEL_LEFT_WRIST     = 8,
      XN_SKEL_LEFT_HAND    = 9,  XN_SKEL_LEFT_FINGERTIP = 10,
      XN_SKEL_RIGHT_COLLAR = 11, XN_SKEL_RIGHT_SHOULDER = 12,
      XN_SKEL_RIGHT_ELBOW  = 13, XN_SKEL_RIGHT_WRIST    = 14,
      XN_SKEL_RIGHT_HAND   = 15, XN_SKEL_RIGHT_FINGERTIP= 16,
      XN_SKEL_LEFT_HIP     = 17, XN_SKEL_LEFT_KNEE      = 18,
      XN_SKEL_LEFT_ANKLE   = 19, XN_SKEL_LEFT_FOOT      = 20,
      XN_SKEL_RIGHT_HIP    = 21, XN_SKEL_RIGHT_KNEE     = 22,
      XN_SKEL_RIGHT_ANKLE  = 23, XN_SKEL_RIGHT_FOOT     = 24
    */
    // A line is drawn from each start joint to the matching end joint.
    // IDs 6/7/9 are OpenNI's LEFT shoulder/elbow/hand, which appear on the
    // image's left side — i.e. the person's right arm when facing the camera.
    int startSkelPoints[2]={6,7};
    int endSkelPoints[2]={7,9};

    CvPoint IndicatorLightPoint = cvPoint(550,50);    // screen position of the "hand tracking active" indicator light
    bool     handistracking = false;

    // Fixed-size buffers for float -> string conversion of the hand
    // position (must be writable arrays, not char* string literals).
    char HandPosition[15]="Right Hand";
    char HandPositionX[10];
    char HandPositionY[10];
    char HandPositionZ[10];

    // callback function of user generator: new user
    // User-generator callback: a new user appeared in the scene.
    // Requesting calibration straight away lets skeleton tracking begin
    // without the classic "psi"-pose trigger.
    void XN_CALLBACK_TYPE NewUser( xn::UserGenerator& generator, XnUserID user,void* pCookie )
    {
        std::cout << "New user identified: " << user << std::endl;
        generator.GetSkeletonCap().RequestCalibration( user, true );
    }

    // callback function of user generator: lost user
    // User-generator callback: a previously seen user left the scene.
    // Purely informational — tracking state is cleaned up by OpenNI itself.
    void XN_CALLBACK_TYPE LostUser( xn::UserGenerator& generator, XnUserID user,void* pCookie )
    {
        std::cout << "User " << user << " lost" << std::endl;
    }

    // callback function of skeleton: calibration end
    // Skeleton-capability callback: calibration finished for a user.
    // On success start tracking that user's skeleton; on failure simply
    // request calibration again and wait for the next attempt.
    void XN_CALLBACK_TYPE CalibrationEnd( xn::SkeletonCapability& skeleton,XnUserID user,XnCalibrationStatus calibrationError,void* pCookie )
    {
        const bool succeeded = ( calibrationError == XN_CALIBRATION_STATUS_OK );

        std::cout << "Calibration complete for user " << user << ", "
                  << ( succeeded ? "Success" : "Failure" ) << std::endl;

        if( succeeded )
            skeleton.StartTracking( user );
        else
            skeleton.RequestCalibration( user, true );
    }

    // Fill the given image buffer with white (255 in every channel).
    // Uses the image's own imageSize rather than a hard-coded 640*480*3,
    // so it works for any 8-bit image; the original also initialised a
    // CvFont here that was never used — that dead code is removed.
    void clearImg(IplImage* inputimg)
    {
        memset(inputimg->imageData, 255, inputimg->imageSize);
    }

    int main( int argc, char** argv )
    {
    	char key=0;
    	xn::ImageMetaData imageMD;// ImageData

    	// initial context
        xn::Context context;
        context.Init();

        //utilize OpenCV initial window
        IplImage* cameraImg=cvCreateImage(cvSize(640,480),IPL_DEPTH_8U,3);
        cvNamedWindow("Camera",1);

        // map output mode
        XnMapOutputMode mapMode;
        mapMode.nXRes = 640;
        mapMode.nYRes = 480;
        mapMode.nFPS = 60;

        // create generator
        depthGenerator.Create( context );
        depthGenerator.SetMapOutputMode( mapMode );
        imageGenerator.Create( context );
        userGenerator.Create( context );

        // Register callback functions of user generator
        XnCallbackHandle userCBHandle;
        userGenerator.RegisterUserCallbacks( NewUser, LostUser, NULL, userCBHandle );

        // Register callback functions of skeleton capability
        xn::SkeletonCapability skeletonCap = userGenerator.GetSkeletonCap();
        skeletonCap.SetSkeletonProfile( XN_SKEL_PROFILE_ALL );
        XnCallbackHandle calibCBHandle;
        skeletonCap.RegisterToCalibrationComplete( CalibrationEnd,&userGenerator, calibCBHandle );

        // start generate data
        context.StartGeneratingAll();
        while( key!=27 )  //ESC,ASCⅡ码27,按下ESC键跳出循环
        {
            context.WaitAndUpdateAll();

            //复制彩色摄像头数据至OpenCV窗口
            imageGenerator.GetMetaData(imageMD);
            memcpy(cameraImg->imageData,imageMD.Data(),640*480*3);//复制彩色图像数组数据
            cvCvtColor(cameraImg,cameraImg,CV_RGB2BGR);

            // get users
            XnUInt16 userCounts = userGenerator.GetNumberOfUsers();
            if( userCounts > 0 )
            {
                XnUserID* userID = new XnUserID[userCounts];
                userGenerator.GetUsers( userID, userCounts );

                for( int i = 0; i < userCounts; ++i )
                {
                    // if is tracking skeleton
                    if( skeletonCap.IsTracking( userID[i] ) )
                    {
                        XnPoint3D skelPointsIn[24],skelPointsOut[24];
                        XnSkeletonJointTransformation mJointTran;

                        /******status ******/
                        handistracking = true;

                        //获取关节数据
                        for(int iter=0;iter<24;iter++)
                        {
                            //XnSkeletonJoint from 1 to 24
                            skeletonCap.GetSkeletonJoint( userID[i],XnSkeletonJoint(iter+1), mJointTran );
                            skelPointsIn[iter]=mJointTran.position.position;
                        }
                        depthGenerator.ConvertRealWorldToProjective(24,skelPointsIn,skelPointsOut);

                        //手部位置浮点转字符串
                        gcvt(skelPointsIn[15].X/10,4,HandPositionX);
                        gcvt(skelPointsIn[15].Y/10,4,HandPositionY);
                        gcvt(skelPointsIn[15].Z/10,4,HandPositionZ);
                        //手部位置显示
                        CvFont font;
                        cvInitFont( &font, CV_FONT_VECTOR0,1, 1, 0, 3, 5);
                        cvPutText(cameraImg, HandPosition ,cvPoint(20, 30), &font, CV_RGB(255,0,0));
                        cvPutText(cameraImg, HandPositionX,cvPoint(20, 60), &font, CV_RGB(255,0,0));
                        cvPutText(cameraImg, HandPositionY,cvPoint(20, 90), &font, CV_RGB(255,0,0));
                        cvPutText(cameraImg, HandPositionZ,cvPoint(20, 120), &font, CV_RGB(255,0,0));

                        //显示图像中左边的手(若是面对摄像头则为人的右手)
                        for(int d=0;d<2;d++)
                        {
                            CvPoint startpoint = cvPoint(skelPointsOut[startSkelPoints[d]-1].X,skelPointsOut[startSkelPoints[d]-1].Y);
                            CvPoint endpoint = cvPoint(skelPointsOut[endSkelPoints[d]-1].X,skelPointsOut[endSkelPoints[d]-1].Y);

                            cvCircle(cameraImg,startpoint,3,CV_RGB(0,0,255),12);
                            cvCircle(cameraImg,endpoint,3,CV_RGB(0,0,255),12);
                            cvLine(cameraImg,startpoint,endpoint,CV_RGB(0,0,255),4);
                        }
                    }
                    else
                    {
                    	/******status ******/
                    	handistracking = false;
                    }
                }
               delete [] userID;
            }
            //手部跟踪指示灯显示
            if(handistracking){
            	cvCircle(cameraImg,IndicatorLightPoint,15,CV_RGB(0,255,0),-1);//手部跟踪指示灯绿
            }
            else{
            	cvCircle(cameraImg,IndicatorLightPoint,15,CV_RGB(255,0,0),-1);//手部跟踪指示灯红
            }

            //OpenCV show image
            cvShowImage("Camera",cameraImg);

            /*
             * int cvWaitKey( int delay=0 )
             *返回值为int型,函数的参数为int型,
             *当delay<=0的时候,如果没有键盘触发,则一直等待,此时的返回值为-1,否则返回值为键盘按下的码字;
             *当delay>  0时,如果没有键盘的的触发,则等待delay的时间,此时的返回值是-1,否则返回值为键盘按下的码字.
             */

            key=cvWaitKey(20); //让每一帧暂停20ms
        }
        // stop and shutdown kinect
        cvDestroyWindow("Camera");
        cvReleaseImage(&cameraImg);
        context.StopGeneratingAll();
        context.Shutdown();

        return 0;
    }

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值