The Second OpenNI + OpenCV2 Experiment: Gesture Recognition

This post shows how to combine OpenNI and OpenCV for gesture recognition: callback functions are implemented to handle gesture events such as Wave, Click, and RaiseHand, and the corresponding marks are drawn onto an image. A complete code example is provided.


This again follows the article by the same expert:

Kinect开发教程三:利用OpenNI进行手势识别 (Kinect Development Tutorial 3: Gesture Recognition with OpenNI)

That said, I prefer the OpenCV 2 C++ API, so I rewrote the code and am posting it here.


The callback registration of OpenNI's GestureGenerator has the following form:

XnStatus RegisterGestureCallbacks(      GestureRecognized RecognizedCB,
                                        GestureProgress   ProgressCB,
                                        void*             pCookie,
                                        XnCallbackHandle& hCallback);


The prototypes of the two callback functions are:

void (XN_CALLBACK_TYPE* GestureRecognized)(     GestureGenerator& generator,
                                                const XnChar*  strGesture,
                                                const XnPoint3D*  pIDPosition,
                                                const XnPoint3D*  pEndPosition,
                                                void* pCookie);

void (XN_CALLBACK_TYPE* GestureProgress)(       GestureGenerator& generator,
                                                const XnChar*   strGesture,
                                                const XnPoint3D*  pPosition,
                                                XnFloat  fProgress,
                                                void*  pCookie); 
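
For reference, here is a minimal registration sketch (a sketch only, assuming an xn::GestureGenerator named gestureGenerator has already been created from an initialized context, and using the gestureRecog / gestureProgress callbacks defined in the code further down):

// Minimal registration sketch, separate from the full experiment code below.
XnCallbackHandle hGestureCB;
XnStatus rc = gestureGenerator.RegisterGestureCallbacks(
        gestureRecog,      // GestureRecognized: fired when a gesture completes
        gestureProgress,   // GestureProgress: fired while a gesture is in progress (may be NULL)
        NULL,              // pCookie: user data forwarded to both callbacks
        hGestureCB);       // handle used later to unregister
if (rc != XN_STATUS_OK)
    cout << "RegisterGestureCallbacks failed: " << xnGetStatusString(rc) << endl;
// ... when the callbacks are no longer needed:
gestureGenerator.UnregisterGestureCallbacks(hGestureCB);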


The code for this experiment is as follows:


#include "stdafx.h"
#include "opencv2/opencv.hpp"
#include "XnCppWrapper.h"  

using namespace cv;
using namespace std;

// operator<< overload so an XnPoint3D can be streamed to cout
ostream &operator<<( ostream &out, const XnPoint3D &rPoint)
{
	out << "(" << rPoint.X << "," << rPoint.Y << "," << rPoint.Z << ")";
	return out;
}

// callback function: a gesture has been recognized
void XN_CALLBACK_TYPE gestureRecog( xn::GestureGenerator &generator,
									const XnChar *strGesture,
									const XnPoint3D *pIDposition,
									const XnPoint3D *pEndPosition,
									void *pCookie)
{
	cout << strGesture << " from " << *pIDposition << " to " << *pEndPosition << endl;

	int imgStartX = 0;
	int imgStartY = 0;
	int imgEndX = 0;
	int imgEndY = 0;
	char locationInfo[100];

	// Rough mapping of the real-world gesture coordinates (in millimetres)
	// onto the 640x480 drawing pad.
	imgStartX = (int)(640/2 - pIDposition->X );
	imgStartY = (int)(480/2 - pIDposition->Y );
	imgEndX = (int)(640/2 - pEndPosition->X );
	imgEndY = (int)(480/2 - pEndPosition->Y );

	// pCookie is the drawing pad's pixel buffer passed at registration time;
	// wrap it in a Mat header (no copy) so OpenCV can draw on it directly.
	Mat refimage(480, 640, CV_8UC3, (uchar *)pCookie);
	
	if(strcmp(strGesture, "RaiseHand") == 0)
	{
		circle(refimage, Point(imgStartX, imgStartY), 1, Scalar(255, 0, 0), 2 );
	}
	else if (strcmp(strGesture, "Wave") == 0)
	{
		line(refimage, Point(imgStartX, imgStartY), Point(imgEndX, imgEndY), Scalar(0, 255, 0), 4);
	}
	else if (strcmp(strGesture, "Click") == 0)
	{
		circle(refimage, Point(imgStartX, imgStartY), 6, Scalar(0, 0, 255), 2 );
	}

	// Wipe the text strip at the bottom of the drawing pad to white before
	// printing the latest gesture positions.
	Mat imageROI(refimage, Rect(40, 420, 400, 60) );
	for(int row = 0; row < imageROI.rows; row++ )
	{
		uchar *dataPtr = imageROI.ptr<uchar>(row);
		for(int col = 0; col < imageROI.cols; col++)
		{
			*dataPtr++ = 255;
			*dataPtr++ = 255;
			*dataPtr++ = 255;
		}
	}

	sprintf_s(locationInfo, "From: %d,%d to %d,%d",(int)pIDposition->X,(int)pIDposition->Y,(int)(pEndPosition->X),(int)(pEndPosition->Y) );
	putText(imageROI,
			locationInfo,
			Point(30, 40),
			FONT_HERSHEY_DUPLEX,
			0.6,
			Scalar(255, 0, 255),
			2,
			4,
			false );
}

// Reset the drawing pad to white and redraw the colour legend
// (blue: Hand Raise, green: Hand Wave, red: Hand Push).
void clearImg(Mat &SrcImage)
{
	if(SrcImage.channels() == 3)
	{
		for(int row = 0; row < 480; row++)
		{
			uchar *dataPtr=  SrcImage.ptr<uchar>(row); 
			for(int col = 0; col < 640; col++)
			{
				*dataPtr++ = 255;
				*dataPtr++ = 255;
				*dataPtr++ = 255;
			}
		}
		string handString = "Hand Raise";
		putText(SrcImage,
				handString,
				Point(20, 20),
				FONT_HERSHEY_DUPLEX,
				1,
				Scalar(255, 0, 0),
				2,
				4,
				false );

		handString = "Hand Wave";
		putText(SrcImage,
				handString,
				Point(20, 50),
				FONT_HERSHEY_DUPLEX,
				1,
				Scalar(0, 255, 0),
				2,
				4,
				false );

		handString = "Hand Push";
		putText(SrcImage,
				handString,
				Point(20, 80),
				FONT_HERSHEY_DUPLEX,
				1,
				Scalar(0, 0, 255),
				2,
				4,
				false );


	}
	else if(SrcImage.channels() == 1)
	{
		for (int row = 0; row < 480; row++)
		{
			uchar *dataPtr = SrcImage.ptr<uchar>(row); 
			for(int col = 0; col < 640; col++)
			{
				*dataPtr++ = 255;				
			}
		}
	}
	
}


// callback function: a gesture is in progress
void XN_CALLBACK_TYPE gestureProgress(  xn::GestureGenerator &generator,
										const XnChar *strGesture,
										const XnPoint3D *pPosition,
										XnFloat fProgress,
										void *pCookie)
{
	// called while a gesture is still in progress, before it is fully recognized
	cout << strGesture << ":" << fProgress << " at " << *pPosition << endl;
}



int main( int argc, char **argv )
{
	Mat drawPadIMg(480, 640, CV_8UC3);
	Mat cameraImg(480, 640, CV_8UC3);
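	// drawPadIMg: white canvas on which the gesture callback draws its marks
	// cameraImg : BGR copy of the RGB frame coming from the image generator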

	namedWindow("Gesture", WINDOW_AUTOSIZE); 
	namedWindow("Camera", WINDOW_AUTOSIZE);
	
	clearImg(drawPadIMg);
	
	XnStatus res;
	char key = 0;

	xn::Context context;
	res = context.Init();

	xn::ImageMetaData imgMD;

	// create the image and gesture generators
	xn::ImageGenerator imageGenerator;
	res = imageGenerator.Create(context);

	xn::GestureGenerator gestureGenerator;
	res = gestureGenerator.Create(context);

	// Add the built-in NITE gestures to watch for; the second argument is an
	// optional detection area (NULL = no restriction)
	gestureGenerator.AddGesture("Wave", NULL);
	gestureGenerator.AddGesture("Click", NULL);
	gestureGenerator.AddGesture("RaiseHand", NULL);

	// Register callback functions of gesture generator
	XnCallbackHandle handle;
	// Register the gesture callbacks gestureRecog / gestureProgress; pass NULL
	// for a callback that is not needed. pCookie is the user data forwarded to
	// the callbacks (here the drawing pad's pixel buffer), and handle is used
	// later to unregister them.
	gestureGenerator.RegisterGestureCallbacks(gestureRecog, gestureProgress, (void *)drawPadIMg.data, handle);
	context.StartGeneratingAll();	// start all generators
	res = context.WaitAndUpdateAll();

	// Loop until ESC is pressed; WaitAndUpdateAll() returns XN_STATUS_OK (0) on success
	while( (key != 27) && !(res = context.WaitAndUpdateAll()) )
	{
		// press 'c' to clear the drawing pad back to the legend
		if(key == 'c')
		{
			clearImg(drawPadIMg);
		}
		imageGenerator.GetMetaData(imgMD);

		// convert the ImageMetaData to a Mat
		uchar *imageMDPointer = (uchar *)imgMD.Data();  
		Mat imageRGB(480, 640, CV_8UC3, imageMDPointer);//Mat(int rows, int cols, int type, void* data, size_t step=AUTO_STEP);  
		cvtColor(imageRGB, cameraImg, CV_RGB2BGR);

		imshow("Gesture", drawPadIMg);
		imshow("Camera", cameraImg);

		key = waitKey(20);
	}
	gestureGenerator.UnregisterGestureCallbacks(handle);
	context.StopGeneratingAll();  
	context.Release();
	return 0;
}
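
A note on coordinates: the positions handed to the callbacks are real-world coordinates in millimetres, so the 640/2 - X and 480/2 - Y mapping in gestureRecog is only a rough way to get something onto the 640x480 drawing pad. If a depth node is also created, OpenNI can do the projection properly. A minimal sketch, assuming an xn::DepthGenerator named depthGenerator has been created on the same context:

// Sketch only: map a real-world point (mm) received in the callback to
// projective (pixel) coordinates of the 640x480 depth map.
XnPoint3D realWorld = *pIDposition;
XnPoint3D projective;
depthGenerator.ConvertRealWorldToProjective(1, &realWorld, &projective);
int imgX = (int)projective.X;   // column in the image
int imgY = (int)projective.Y;   // row in the image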

That's it for today; to be continued tomorrow.







                