This still follows that expert's article mentioned before.
However, I personally prefer the OpenCV 2 interfaces, so I rewrote the code and am posting it here again.
The callback registration function of the OpenNI GestureGenerator has the form:
XnStatus RegisterGestureCallbacks( GestureRecognized RecognizedCB,
GestureProgress ProgressCB,
void* pCookie,
XnCallbackHandle& hCallback)
The prototypes of the two callback functions are:
void (XN_CALLBACK_TYPE* GestureRecognized)( GestureGenerator& generator,
const XnChar* strGesture,
const XnPoint3D* pIDPosition,
const XnPoint3D* pEndPosition,
void* pCookie);
void (XN_CALLBACK_TYPE* GestureProgress)( GestureGenerator& generator,
const XnChar* strGesture,
const XnPoint3D* pPosition,
XnFloat fProgress,
void* pCookie);
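A quick note before the code: the pCookie pointer you pass to RegisterGestureCallbacks is handed back unchanged as the last argument of every callback, which is how the program below gets its OpenCV drawing pad into gestureRecog. Here is a rough sketch of that pattern (GestureContext and onRecognized are made-up names for illustration only, not part of OpenNI):

#include <opencv2/opencv.hpp>
#include "XnCppWrapper.h"

// Hypothetical user data forwarded to the callbacks through pCookie.
struct GestureContext
{
    cv::Mat canvas; // image the callback draws on
    int hits;       // number of gestures recognized so far
};

// Recognition callback: cast pCookie back to the type that was registered.
void XN_CALLBACK_TYPE onRecognized(xn::GestureGenerator &generator,
                                   const XnChar *strGesture,
                                   const XnPoint3D *pIDPosition,
                                   const XnPoint3D *pEndPosition,
                                   void *pCookie)
{
    GestureContext *ctx = static_cast<GestureContext *>(pCookie);
    ctx->hits++;
    cv::circle(ctx->canvas, cv::Point(320, 240), 3, cv::Scalar(0, 0, 255), 2);
}

// Registration (inside a function that owns the generator):
//   GestureContext ctx;
//   ctx.canvas = cv::Mat(480, 640, CV_8UC3, cv::Scalar::all(255));
//   XnCallbackHandle handle;
//   gestureGenerator.RegisterGestureCallbacks(onRecognized, NULL, &ctx, handle);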
The code for this experiment is as follows:
#include "stdafx.h"
#include "opencv2/opencv.hpp"
#include "XnCppWrapper.h"
using namespace cv;
using namespace std;
// operator<< overload for printing an XnPoint3D
ostream &operator<<( ostream &out, const XnPoint3D &rPoint)
{
out << "(" << rPoint.X << "," << rPoint.Y << "," << rPoint.Z << ")";
return out;
}
// callback function for gesture recognized
void XN_CALLBACK_TYPE gestureRecog( xn::GestureGenerator &generator,
const XnChar *strGesture,
const XnPoint3D *pIDposition,
const XnPoint3D *pEndPosition,
void *pCookie)
{
cout << strGesture << " from " << *pIDposition << " to " << *pEndPosition << endl;
int imgStartX = 0;
int imgStartY = 0;
int imgEndX = 0;
int imgEndY = 0;
char locationInfo[100];
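// roughly map the real-world (mm) gesture coordinates onto the 640x480 canvas, offsetting to the image center and flipping the axes (image Y grows downward)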
imgStartX = (int)(640/2 - pIDposition->X );
imgStartY = (int)(480/2 - pIDposition->Y );
imgEndX = (int)(640/2 - pEndPosition->X );
imgEndY = (int)(480/2 - pEndPosition->Y );
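// wrap the drawing-pad buffer passed in through pCookie; no pixel data is copied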
Mat refimage(480, 640, CV_8UC3, (uchar *)pCookie);
if(strcmp(strGesture, "RaiseHand") == 0)
{
circle(refimage, Point(imgStartX, imgStartY), 1, Scalar(255, 0, 0), 2 );
}
else if (strcmp(strGesture, "Wave") == 0)
{
line(refimage, Point(imgStartX, imgStartY), Point(imgEndX, imgEndY), Scalar(0, 255, 0), 4);
}
else if (strcmp(strGesture, "Click") == 0)
{
circle(refimage, Point(imgStartX, imgStartY), 6, Scalar(0, 0, 255), 2 );
}
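// clear a banner region near the bottom of the pad to white before printing the location text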
Mat imageROI(refimage, Rect(40, 420, 400, 60) );
for(int row = 0; row < imageROI.rows; row++ )
{
uchar *dataPtr = imageROI.ptr<uchar>(row);
for(int col = 0; col < imageROI.cols; col++)
{
*dataPtr++ = 255;
*dataPtr++ = 255;
*dataPtr++ = 255;
}
}
sprintf_s(locationInfo, "From: %d,%d to %d,%d",(int)pIDposition->X,(int)pIDposition->Y,(int)(pEndPosition->X),(int)(pEndPosition->Y) );
putText(imageROI,
locationInfo,
Point(30, 40),
FONT_HERSHEY_DUPLEX,
0.6,
Scalar(255, 0, 255),
2,
4,
false );
}
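// reset the image to white; for the 3-channel drawing pad also redraw the color legend for the three gestures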
void clearImg(Mat &SrcImage)
{
if(SrcImage.channels() == 3)
{
for(int row = 0; row < 480; row++)
{
uchar *dataPtr= SrcImage.ptr<uchar>(row);
for(int col = 0; col < 640; col++)
{
*dataPtr++ = 255;
*dataPtr++ = 255;
*dataPtr++ = 255;
}
}
string handString = "Hand Raise";
putText(SrcImage,
handString,
Point(20, 20),
FONT_HERSHEY_DUPLEX,
1,
Scalar(255, 0, 0),
2,
4,
false );
handString = "Hand Wave";
putText(SrcImage,
handString,
Point(20, 50),
FONT_HERSHEY_DUPLEX,
1,
Scalar(0, 255, 0),
2,
4,
false );
handString = "Hand Push";
putText(SrcImage,
handString,
Point(20, 80),
FONT_HERSHEY_DUPLEX,
1,
Scalar(0, 0, 255),
2,
4,
false );
}
else if(SrcImage.channels() == 1)
{
for (int row = 0; row < 480; row++)
{
uchar *dataPtr = SrcImage.ptr<uchar>(row);
for(int col = 0; col < 640; col++)
{
*dataPtr++ = 255;
}
}
}
}
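// callback function for gesture progress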
void XN_CALLBACK_TYPE gestureProgress( xn::GestureGenerator &generator,
const XnChar *strGesture,
const XnPoint3D *pPosition,
XnFloat fProgress,
void *pCookie)
{
cout << strGesture << ":" << fProgress << " at " << *pPosition << endl;
}
int main( int argc, char **argv )
{
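// 480x640 3-channel canvases: one pad for drawing gesture feedback, one for the camera image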
Mat drawPadIMg(480, 640, CV_8UC3);
Mat cameraImg(480, 640, CV_8UC3);
namedWindow("Gesture", WINDOW_AUTOSIZE);
namedWindow("Camera", WINDOW_AUTOSIZE);
clearImg(drawPadIMg);
XnStatus res;
char key = 0;
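// initialize the OpenNI context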
xn::Context context;
res = context.Init();
xn::ImageMetaData imgMD;
// create the image and gesture generators
xn::ImageGenerator imageGenerator;
res = imageGenerator.Create(context);
xn::GestureGenerator gestureGenerator;
res = gestureGenerator.Create(context);
// Add gesture
gestureGenerator.AddGesture("Wave", NULL);
gestureGenerator.AddGesture("Click", NULL);
gestureGenerator.AddGesture("RaiseHand", NULL);
// Register callback functions of gesture generator
XnCallbackHandle handle;
gestureGenerator.RegisterGestureCallbacks(gestureRecog, gestureProgress, (void *)drawPadIMg.data, handle);
// Register the gesture callbacks: gestureRecog handles recognized gestures and gestureProgress reports progress; pass NULL for a callback that is not needed. pCookie is user data passed through to the callbacks, and handle is used later to unregister.
context.StartGeneratingAll(); // start all generators
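// wait once for the first batch of data before entering the display loop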
res = context.WaitAndUpdateAll();
while( (key != 27) && !(res = context.WaitAndUpdateAll() ) )
{
if(key=='c')
{
clearImg(drawPadIMg);
}
imageGenerator.GetMetaData(imgMD);
// convert the ImageMetaData to a Mat (the constructor just wraps the buffer, no copy)
uchar *imageMDPointer = (uchar *)imgMD.Data();
Mat imageRGB(480, 640, CV_8UC3, imageMDPointer);//Mat(int rows, int cols, int type, void* data, size_t step=AUTO_STEP);
cvtColor(imageRGB, cameraImg, CV_RGB2BGR);
imshow("Gesture", drawPadIMg);
imshow("Camera", cameraImg);
key = waitKey(20);
}
gestureGenerator.UnregisterGestureCallbacks(handle);
context.StopGeneratingAll();
context.Release();
return 0;
}
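One thing the listing above glosses over is error handling: every OpenNI call returns an XnStatus, and res is assigned but never checked, so a missing sensor or missing NITE middleware just leaves you with blank windows. A small check in the spirit of the CHECK_RC macro used in OpenNI's own samples makes such failures visible (only a sketch, to be adapted as needed; xnGetStatusString turns a status code into a readable message):

#include <stdio.h>
#include "XnCppWrapper.h"

// Print a readable error message and bail out when an OpenNI call fails.
#define CHECK_RC(rc, what)                                          \
    if ((rc) != XN_STATUS_OK)                                       \
    {                                                               \
        printf("%s failed: %s\n", what, xnGetStatusString(rc));     \
        return rc;                                                  \
    }

// Usage inside main():
//   res = context.Init();
//   CHECK_RC(res, "Init context");
//   res = gestureGenerator.Create(context);
//   CHECK_RC(res, "Create GestureGenerator");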
That's all for today; I'll continue tomorrow.