OpenNI + OpenCv2的第二个实验手势识别
来源:互联网 发布:数据挖掘的就业前景 编辑:程序博客网 时间:2024/04/28 12:02
还是参照了该大牛同学的文章
Kinect开发教程三:利用OpenNI进行手势识别
不过个人比较喜欢用opencv2 中的东西,改写了下代码,重新贴上来。
OpenNI GestureGenerator的 Callback function形式为
RegisterGestureCallbacks( GestureRecognized RecognizedCB, GestureProgress ProgressCB, void* pCookie, XnCallbackHandle& hCallback)
两个回调函数原型分别为:
void (XN_CALLBACK_TYPE* GestureRecognized)( GestureGenerator& generator, const XnChar* strGesture, const XnPoint3D* pIDPosition, const XnPoint3D* pEndPosition, void* pCookie);

void (XN_CALLBACK_TYPE* GestureProgress)( GestureGenerator& generator, const XnChar* strGesture, const XnPoint3D* pPosition, XnFloat fProgress, void* pCookie);
本实验代码如下:
#include "stdafx.h"
#include "opencv2/opencv.hpp"
#include "XnCppWrapper.h"

using namespace cv;
using namespace std;

// Stream-insertion overload so an XnPoint3D prints as "(X,Y,Z)".
// BUG FIX: the original printed rPoint.X twice; the third component is Z.
ostream &operator<<(ostream &out, const XnPoint3D &rPoint)
{
    out << "(" << rPoint.X << "," << rPoint.Y << "," << rPoint.Z << ")";
    return out;
}

// OpenNI callback fired when a gesture has been fully recognized.
// pCookie carries the pixel buffer of the 480x640 CV_8UC3 drawing pad
// (passed as drawPadIMg.data in main); we wrap it in a non-owning Mat
// header and draw the gesture's footprint plus a coordinate caption.
void XN_CALLBACK_TYPE gestureRecog(xn::GestureGenerator &generator,
                                   const XnChar *strGesture,
                                   const XnPoint3D *pIDposition,
                                   const XnPoint3D *pEndPosition,
                                   void *pCookie)
{
    // FIX: added the missing space after "from" in the log output.
    cout << strGesture << " from " << *pIDposition
         << " to " << *pEndPosition << endl;

    // Map sensor coordinates into the 640x480 pad, mirroring both axes
    // (the camera's coordinate system is flipped relative to the image).
    const int imgStartX = (int)(640 / 2 - pIDposition->X);
    const int imgStartY = (int)(480 / 2 - pIDposition->Y);
    const int imgEndX   = (int)(640 / 2 - pEndPosition->X);
    const int imgEndY   = (int)(480 / 2 - pEndPosition->Y);

    // Non-owning header over the drawing-pad pixels supplied via the cookie;
    // anything drawn here appears in the "Gesture" window.
    Mat refimage(480, 640, CV_8UC3, (uchar *)pCookie);
    if (strcmp(strGesture, "RaiseHand") == 0)
    {
        // Blue dot where the raised hand was detected.
        circle(refimage, Point(imgStartX, imgStartY), 1, Scalar(255, 0, 0), 2);
    }
    else if (strcmp(strGesture, "Wave") == 0)
    {
        // Green stroke from start to end of the wave.
        line(refimage, Point(imgStartX, imgStartY),
             Point(imgEndX, imgEndY), Scalar(0, 255, 0), 4);
    }
    else if (strcmp(strGesture, "Click") == 0)
    {
        // Red circle at the click (push) position.
        circle(refimage, Point(imgStartX, imgStartY), 6, Scalar(0, 0, 255), 2);
    }

    // Blank the status strip at the bottom of the pad before writing the
    // new coordinate caption (setTo replaces the original per-pixel loop).
    Mat imageROI(refimage, Rect(40, 420, 400, 60));
    imageROI.setTo(Scalar(255, 255, 255));

    char locationInfo[100];
    sprintf_s(locationInfo, "From: %d,%d to %d,%d",
              (int)pIDposition->X, (int)pIDposition->Y,
              (int)pEndPosition->X, (int)pEndPosition->Y);
    putText(imageROI, locationInfo, Point(30, 40), FONT_HERSHEY_DUPLEX, 0.6,
            Scalar(255, 0, 255), 2, 4, false);
}

// Reset the drawing pad to white; for a 3-channel pad also redraw the
// gesture legend (each entry's colour matches the colour gestureRecog
// uses for that gesture). setTo replaces the hand-rolled fill loops,
// which assumed a 480x640 image anyway.
void clearImg(Mat &SrcImage)
{
    if (SrcImage.channels() == 3)
    {
        SrcImage.setTo(Scalar(255, 255, 255));
        putText(SrcImage, "Hand Raise", Point(20, 20), FONT_HERSHEY_DUPLEX, 1,
                Scalar(255, 0, 0), 2, 4, false);
        putText(SrcImage, "Hand Wave", Point(20, 50), FONT_HERSHEY_DUPLEX, 1,
                Scalar(0, 255, 0), 2, 4, false);
        putText(SrcImage, "Hand Push", Point(20, 80), FONT_HERSHEY_DUPLEX, 1,
                Scalar(0, 0, 255), 2, 4, false);
    }
    else if (SrcImage.channels() == 1)
    {
        SrcImage.setTo(Scalar(255));
    }
}

// OpenNI callback fired while a gesture is still being performed;
// simply logs the gesture name, completion fraction, and position.
void XN_CALLBACK_TYPE gestureProgress(xn::GestureGenerator &generator,
                                      const XnChar *strGesture,
                                      const XnPoint3D *pPosition,
                                      XnFloat fProgress,
                                      void *pCookie)
{
    cout << strGesture << ":" << fProgress << " at " << *pPosition << endl;
}

int main(int argc, char **argv)
{
    Mat drawPadIMg(480, 640, CV_8UC3);  // pad the gesture callbacks paint on
    Mat cameraImg(480, 640, CV_8UC3);   // BGR copy of the RGB camera frame
    namedWindow("Gesture", WINDOW_AUTOSIZE);
    namedWindow("Camera", WINDOW_AUTOSIZE);
    clearImg(drawPadIMg);

    XnStatus res;
    char key = 0;
    xn::Context context;
    res = context.Init();

    xn::ImageMetaData imgMD;

    // Create the production nodes: RGB image stream + gesture detector.
    xn::ImageGenerator imageGenerator;
    res = imageGenerator.Create(context);
    xn::GestureGenerator gestureGenerator;
    res = gestureGenerator.Create(context);

    // Gesture names must match the strings compared in gestureRecog.
    gestureGenerator.AddGesture("Wave", NULL);
    gestureGenerator.AddGesture("Click", NULL);
    gestureGenerator.AddGesture("RaiseHand", NULL);

    // Register recognition/progress callbacks. The drawing pad's pixel
    // buffer is the user cookie so gestureRecog can draw on it; 'handle'
    // is kept so the callbacks can be unregistered on exit.
    XnCallbackHandle handle;
    gestureGenerator.RegisterGestureCallbacks(gestureRecog, gestureProgress,
                                              (void *)drawPadIMg.data, handle);

    context.StartGeneratingAll();

    // FIX: removed the redundant pre-loop WaitAndUpdateAll() whose result
    // was immediately overwritten by the loop condition's call.
    // Loop until ESC or until WaitAndUpdateAll reports an error
    // (XN_STATUS_OK is 0, so a non-zero status exits the loop).
    while ((key != 27) && !(res = context.WaitAndUpdateAll()))
    {
        if (key == 'c')
        {
            clearImg(drawPadIMg);  // 'c' wipes the drawing pad
        }
        imageGenerator.GetMetaData(imgMD);

        // Wrap the RGB frame without copying, then convert to BGR for imshow.
        uchar *imageMDPointer = (uchar *)imgMD.Data();
        Mat imageRGB(480, 640, CV_8UC3, imageMDPointer);
        cvtColor(imageRGB, cameraImg, CV_RGB2BGR);

        imshow("Gesture", drawPadIMg);
        imshow("Camera", cameraImg);
        key = waitKey(20);
    }

    gestureGenerator.UnregisterGestureCallbacks(handle);
    context.StopGeneratingAll();
    context.Release();
    return 0;
}
今天就到这里,明天继续。
0 0
- OpenNI + OpenCv2的第二个实验手势识别
- Kinect的第一个实验OPenNI+OpenCV2
- kinect+openni手势识别
- Kinect+OpenNI学习笔记之5(使用OpenNI自带的类进行简单手势识别)
- Kinect2手势识别/手势跟踪-HandTracker/OpenNI/Linux or Windows
- Kinect+OpenNI学习笔记之12(简单手势所表示的数字的识别)
- 在ubuntu14.04下openni+opencv+kinectV1的学习三:手势识别
- Kinect开发教程三:利用OpenNI进行手势识别
- 利用OpenNI进行手势识别 (小斤)
- Kinect开发教程三:利用OpenNI进行手势识别
- Kinect开发教程三:利用OpenNI进行手势识别
- Kinect开发教程三:利用OpenNI进行手势识别
- Kinect开发教程:利用OpenNI进行手势识别
- OPENNI学习实践-OPENNI体感开发实战中NITE手势识别样例
- android的手势识别
- Android的手势识别
- 手势的识别
- 手势的识别
- 序列化Python对象
- linux虚拟机上不了网的问题!
- 『算法学习笔记』1st day. 顺序结构程序设计
- 游戏服务器之技能
- python学习笔记1语句
- OpenNI + OpenCv2的第二个实验手势识别
- 关于PHP乱码的研究
- Android私有文件资源文件的存取
- java基础面试题
- 新年好啊
- C++:在函数parameter passed by reference时,如果可能,尽量使其是reference to const
- linux安装db2
- liunx服务器配置
- Java阻塞队列的实现