Kinect V2 Development (5): Drawing the Skeleton


In the previous post we read out the joint data. This time the goal is to connect the joints in OpenCV and draw the skeleton.
As we saw in the previous post, each joint position is a CameraSpacePoint, i.e. it lives in the camera space coordinate system. To display it in OpenCV we first have to map it to either the depth space or the color space coordinate system; the mapping is done through the ICoordinateMapper interface obtained from the sensor. Once the coordinates have been converted, drawing the skeleton is just a matter of drawing a line between each pair of connected joints.
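The full listing below maps to the 1920x1080 color image with MapCameraPointToColorSpace. If you would rather draw on the 512x424 depth image, the only change is the mapping call. The following is a minimal sketch of that variant, mirroring the DrawLine helper in the listing further down; the name DrawLineDepth is just illustrative and not part of the original program.

#include <Kinect.h>
#include <opencv2/imgproc.hpp>

// Sketch only: draw one bone into a depth-resolution (512x424) image.
// Assumes pMapper was obtained via IKinectSensor::get_CoordinateMapper.
void DrawLineDepth(cv::Mat& img, const Joint& j1, const Joint& j2, ICoordinateMapper* pMapper)
{
    // Skip bones whose endpoints are not tracked at all
    if (j1.TrackingState == TrackingState_NotTracked || j2.TrackingState == TrackingState_NotTracked)
        return;

    // Map both joints from camera space into depth-image pixel coordinates
    DepthSpacePoint p1 = { 0 }, p2 = { 0 };
    pMapper->MapCameraPointToDepthSpace(j1.Position, &p1);
    pMapper->MapCameraPointToDepthSpace(j2.Position, &p2);

    // The mapper returns floating-point pixel coordinates; they can be negative
    // or infinite when the mapping fails, so a range check is advisable.
    cv::line(img, cv::Point((int)p1.X, (int)p1.Y), cv::Point((int)p2.X, (int)p2.Y),
             cv::Scalar(0, 0, 255), 2);
}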
The code is as follows:

#include <iostream>
#include <Windows.h>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
#include <Kinect.h>

using namespace std;
using namespace cv;

void DrawLine(Mat& Img, const Joint& r1, const Joint& r2, ICoordinateMapper* pMapper)
{
    // Use the two joints as the endpoints of the segment, filtering by tracking state
    if (r1.TrackingState == TrackingState_NotTracked || r2.TrackingState == TrackingState_NotTracked)
        return;

    // Map the joints from camera space to color space
    ColorSpacePoint p1, p2;
    pMapper->MapCameraPointToColorSpace(r1.Position, &p1);
    pMapper->MapCameraPointToColorSpace(r2.Position, &p2);

    line(Img, Point(p1.X, p1.Y), Point(p2.X, p2.Y), Scalar(0, 0, 255), 5);
}

int main(void)
{
    // 1a. Get the sensor
    IKinectSensor* pSensor = nullptr;
    GetDefaultKinectSensor(&pSensor);

    // 1b. Open the sensor
    pSensor->Open();

    // 2. Read the color image into an image matrix
    IColorFrameSource* pFrameSource = nullptr;
    pSensor->get_ColorFrameSource(&pFrameSource);

    int iWidth = 0, iHeight = 0;
    IFrameDescription* pFrameDescription = nullptr;
    pFrameSource->get_FrameDescription(&pFrameDescription);
    pFrameDescription->get_Width(&iWidth);
    pFrameDescription->get_Height(&iHeight);

    IColorFrameReader* pColorFrameReader = nullptr;
    pFrameSource->OpenReader(&pColorFrameReader);

    pFrameDescription->Release();
    pFrameDescription = nullptr;
    pFrameSource->Release();
    pFrameSource = nullptr;

    // Prepare OpenCV data
    Mat mColorImg(iHeight, iWidth, CV_8UC4);
    UINT uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);

    // 3. Read the joint data
    IBodyFrameReader* pBodyFrameReader = nullptr;
    IBody** aBodyData = nullptr;
    INT32 iBodyCount = 0;

    IBodyFrameSource* pBodySource = nullptr;
    pSensor->get_BodyFrameSource(&pBodySource);
    pBodySource->get_BodyCount(&iBodyCount);

    aBodyData = new IBody*[iBodyCount];
    for (int i = 0; i < iBodyCount; ++i)
        aBodyData[i] = nullptr;

    pBodySource->OpenReader(&pBodyFrameReader);
    pBodySource->Release();
    pBodySource = nullptr;

    // 4. Prepare the coordinate mapper
    ICoordinateMapper* pCoordinateMapper = nullptr;
    pSensor->get_CoordinateMapper(&pCoordinateMapper);

    namedWindow("Body Image");

    while (1)
    {
        // 4a. Read the color frame into the matrix
        IColorFrame* pColorFrame = nullptr;
        if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)
        {
            pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra);
            pColorFrame->Release();
        }

        // Draw the skeleton on a copy of the color image
        // (use a black image instead to show only the skeleton)
        Mat mImg = mColorImg.clone();
        //Mat mImg = Mat::zeros(iHeight, iWidth, CV_8UC4);

        // 4b. Read the body data into the array
        IBodyFrame* pBodyFrame = nullptr;
        if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK)
        {
            // 4b1. Get the body data
            if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK)
            {
                // 4b2. Loop over every body
                for (int i = 0; i < iBodyCount; ++i)
                {
                    IBody* pBody = aBodyData[i];

                    // 4b3. Check the tracking state
                    BOOLEAN bTracked = false;
                    if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked)
                    {
                        // 4b4. Get the joints
                        Joint aJoints[JointType::JointType_Count];
                        if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK)
                        {
                            // Torso and head
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);

                            // Left arm
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);

                            // Right arm
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);

                            // Left leg
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HipLeft], aJoints[JointType_KneeLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_KneeLeft], aJoints[JointType_AnkleLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_AnkleLeft], aJoints[JointType_FootLeft], pCoordinateMapper);

                            // Right leg
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HipRight], aJoints[JointType_KneeRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_KneeRight], aJoints[JointType_AnkleRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_AnkleRight], aJoints[JointType_FootRight], pCoordinateMapper);
                        }
                    }
                }
            }
            else
            {
                cerr << "Can't read body data" << endl;
            }

            // 4e. Release the body frame
            pBodyFrame->Release();
        }

        // Show the image
        imshow("Body Image", mImg);
        if (waitKey(30) == VK_ESCAPE)
        {
            break;
        }
    }
    delete[] aBodyData;

    // 3. Release the body frame reader
    cout << "Release body frame reader" << endl;
    pBodyFrameReader->Release();
    pBodyFrameReader = nullptr;

    // 2. Release the color frame reader
    cout << "Release color frame reader" << endl;
    pColorFrameReader->Release();
    pColorFrameReader = nullptr;

    // 1c. Close the sensor
    cout << "close sensor" << endl;
    pSensor->Close();

    // 1d. Release the sensor
    cout << "Release sensor" << endl;
    pSensor->Release();
    pSensor = nullptr;

    return 0;
}
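Since the color frame is 1920x1080, the display window is quite large. As a small optional tweak that is not part of the original listing, you can shrink the frame before displaying it; a minimal sketch, reusing the imgproc header already included above:

// Optional: display at half size instead of imshow("Body Image", mImg);
Mat mShow;
resize(mImg, mShow, Size(), 0.5, 0.5);
imshow("Body Image", mShow);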