Kinect v2.0原理介绍之十:获取高清面部帧的AU单元特征保存到文件

来源:互联网 发布:灵当软件 编辑:程序博客网 时间:2024/05/01 02:47

有兴趣的小伙伴,加kinect算法交流群:462964980。

本程序是在FaceBasics-D2D实例基础上加以修改。
直接上代码吧。
在HRESULT CFaceBasics::InitializeDefaultSensor()函数中,要添加:

// 创建高清面部帧源if (SUCCEEDED(hr)){    hr = CreateHighDefinitionFaceFrameSource(m_pKinectSensor, &m_pHDFaceFrameSource);}// 创建高清面部帧读取器if (SUCCEEDED(hr)){    hr = m_pHDFaceFrameSource->OpenReader(&m_pHDFaceFrameReader);}// 注册临帧事件// 创建面部特征对齐if (SUCCEEDED(hr)){    hr = CreateFaceAlignment(&m_pFaceAlignment);}SafeRelease(m_pHDFaceFrameSource);

在void CFaceBasics::ProcessFaces()函数中需要添加:

// AU单元float               au[FaceShapeAnimations_Count];void CFaceBasics::ProcessFaces(){    HRESULT hr=S_OK;    //GetFaceShapeAnimations();    IBody* ppBodies[BODY_COUNT] = {0};    bool bHaveBodyData = SUCCEEDED( UpdateBodyData(ppBodies) );    if (bHaveBodyData)    {        // 检查是否未被跟踪        BOOLEAN bFaceTracked = false;        //if (SUCCEEDED(hr) )        //{        // check if a valid face is tracked in this face frame        hr = m_pHDFaceFrameReader->get_HighDefinitionFaceFrameSource(&m_pHDFaceFrameSource);        if(SUCCEEDED(hr))        {            hr = m_pHDFaceFrameSource->get_IsTrackingIdValid(&bFaceTracked);        }        //}        // kinect离人脸的距离        double d_face = 10.0;        // 保存离kinect最近人的id        int n_id = 0;        if (SUCCEEDED(hr) && !bFaceTracked)        {            // 只检测离着kinect最近的人            for (int i = 0; i < BODY_COUNT; ++i)             {                hr = ppBodies[i]->get_IsTracked(&bFaceTracked);                if (SUCCEEDED(hr) && bFaceTracked)                {                    Joint joints[JointType_Count];                     hr = ppBodies[i]->GetJoints(_countof(joints), joints);                    if (SUCCEEDED(hr))                    {                        // A 3D location in camera space. 
头部离kinect的距离 headJoint.Z;                        CameraSpacePoint headJoint = joints[JointType_Head].Position;                        if(headJoint.Z < d_face)                        {                            d_face = headJoint.Z;                            n_id = i;                        }                    }                    // break;                }            }            hr = ppBodies[n_id]->get_IsTracked(&bFaceTracked);            // 下面很重要,要得到高清面部帧人的id            if (SUCCEEDED(hr) && bFaceTracked)            {                UINT64 id = 0;                ppBodies[n_id]->get_TrackingId(&id);                m_pHDFaceFrameSource->put_TrackingId(id);            }        }    }    for (int i = 0; i < BODY_COUNT; ++i) SafeRelease(ppBodies[i]);    //SafeRelease(m_pHDFaceFrameSource);    if (!m_pColorFrameReader || !m_pBodyFrameReader)    {        return;    }    if (SUCCEEDED(hr) && m_pHDFaceFrameReader != nullptr)    {        IHighDefinitionFaceFrame* pHDFaceFrame = nullptr;               hr = m_pHDFaceFrameReader ->AcquireLatestFrame(&pHDFaceFrame);        if(SUCCEEDED(hr))        {            hr = pHDFaceFrame->GetAndRefreshFaceAlignmentResult(m_pFaceAlignment);        }        if (SUCCEEDED(hr))        {            m_pFaceAlignment->GetAnimationUnits(FaceShapeAnimations_Count,au);            char buffer[1024];            memset(buffer,0,sizeof(char)*1024);            //%f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f \tT\r\n            //%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\tT\r\n            auto length = sprintf(buffer, "%f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f  %f \tT\r\n",                au[FaceShapeAnimations_JawOpen],                au[FaceShapeAnimations_LipPucker],                au[FaceShapeAnimations_JawSlideRight],                au[FaceShapeAnimations_LipStretcherRight],                au[FaceShapeAnimations_LipStretcherLeft],                
au[FaceShapeAnimations_LipCornerPullerLeft],                au[FaceShapeAnimations_LipCornerPullerRight],                au[FaceShapeAnimations_LipCornerDepressorLeft],                au[FaceShapeAnimations_LipCornerDepressorRight],                au[FaceShapeAnimations_LeftcheekPuff],                au[FaceShapeAnimations_RightcheekPuff],                au[FaceShapeAnimations_LefteyeClosed],                au[FaceShapeAnimations_RighteyeClosed],                au[FaceShapeAnimations_RighteyebrowLowerer],                au[FaceShapeAnimations_LefteyebrowLowerer],                au[FaceShapeAnimations_LowerlipDepressorLeft],                au[FaceShapeAnimations_LowerlipDepressorRight]);            std::wstring str = L"au.txt";            std::ofstream file;            // std::ios::app是让其在文件结束添加数据,不会覆盖之前的数据            file.open(str,std::ios::app);            if(file.is_open())            {                //file.seekp(0,std::ios::end);                file.write(buffer,strlen(buffer));            }            file.close();        }        // 安全释放        SafeRelease(pHDFaceFrame);    }}

到这里为止,AU单元的特征向量就生成了,接下来就可以用我们的机器学习算法进行分类,或者在此基础上做一些新的开发。

0 0
原创粉丝点击