带Kinect的英特尔骨骼手跟踪库

时间:2014-04-02 14:20:12

标签: kinect tracking intel

我发现了这个 - > http://software.intel.com/en-us/articles/the-intel-skeletal-hand-tracking-library-experimental-release

我觉得这个库很不错。问题是我不想购买“创意互动手势相机”，而是想改用我手头的 Kinect。这并不容易，所以需要大家的帮助。我从 Init() 入手，下面是原始函数：

// Initialize the PXC capture pipeline and the hand-tracking library.
// Enumerates capture implementations -> devices -> video streams -> profiles
// until a depth profile of at least 60 FPS delivers one readable frame.
// On success, dimx/dimy/fovx/fovy are set, the color/depth buffers are
// allocated, the HSKL tracker is created, and true is returned.
// Returns false if no suitable depth stream could be opened.
inline bool Tracker::Init()
{
    if(PXCSession_Create(session.ReleaseRef()) < PXC_STATUS_NO_ERROR || !session.IsValid()) return false;

    for(int i=0; ; ++i) // For valid capture contexts
    { 
        PXCSession::ImplDesc desc, filter = { PXCSession::IMPL_GROUP_SENSOR, PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE };
        if(session->QueryImpl(&filter, i, &desc) < PXC_STATUS_NO_ERROR) break;
        if(session->CreateImpl(&desc, PXCCapture::CUID, (void**)capture.ReleaseRef()) < PXC_STATUS_NO_ERROR || !capture.IsValid()) continue;

        for(int j=0; ; ++j) // For valid devices
        { 
            PXCCapture::DeviceInfo dinfo;
            if(capture->QueryDevice(j, &dinfo) < PXC_STATUS_NO_ERROR) break;
            if(capture->CreateDevice(j, device.ReleaseRef()) < PXC_STATUS_NO_ERROR || !device.IsValid()) continue;

            for(int k=0; ; ++k) // For valid video streams
            { 
                PXCCapture::Device::StreamInfo sinfo; 
                if(device->QueryStream(k, &sinfo) < PXC_STATUS_NO_ERROR) break;
                // BUG FIX: the original checked !device.IsValid() here, but the handle
                // just created by CreateStream is `stream`; validate that one instead,
                // otherwise stream->QueryProfile below can run on an invalid handle.
                if(sinfo.cuid != PXCCapture::VideoStream::CUID || device->CreateStream(k, PXCCapture::VideoStream::CUID, (void**)stream.ReleaseRef()) < PXC_STATUS_NO_ERROR || !stream.IsValid()) continue;

                for (int m=0; ; ++m) // For depth buffer profiles of at least 60 FPS
                { 
                    PXCCapture::VideoStream::ProfileInfo pinfo;
                    if(stream->QueryProfile(m, &pinfo) < PXC_STATUS_NO_ERROR) break;
                    // Require a depth-format profile whose minimum frame rate is >= 60 FPS
                    // and which the stream accepts as its active profile.
                    if(pinfo.imageInfo.format != PXCImage::IMAGE_TYPE_DEPTH || pinfo.frameRateMin.numerator / pinfo.frameRateMin.denominator < 60 || stream->SetProfile(&pinfo) < PXC_STATUS_NO_ERROR) continue;

                    // If we can read at least one frame
                    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
                    if(sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR)
                    {
                        // Obtain useful properties and reserve room for local copies of depth and color images
                        dimx = pinfo.imageInfo.width; dimy = pinfo.imageInfo.height;
                        PXCPointF32 flen; device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &flen);
                        // Full field of view (radians) from focal length: 2*atan(extent / (2*f)).
                        fovx = atan(dimx / (flen.x*2))*2; fovy = atan(dimy / (flen.y*2))*2;
                        color = new unsigned char[dimx*dimy*3]; // RGB preview buffer
                        depth = new unsigned short[dimx*dimy];  // raw 16-bit depth copy

                        // Initialize tracking library
                        tracker = hsklCreateTracker(HSKL_COORDS_X_RIGHT_Y_DOWN_Z_FWD, HSKL_API_VERSION);
                        hsklSetSensorProperties(tracker, HSKL_SENSOR_CREATIVE, dimx, dimy, fovx, fovy);
                        return true;
                    }
                }
                stream.ReleaseRef();
            }
            device.ReleaseRef();
        }
        capture.ReleaseRef();
    }
    return false;
}

}

这是我的:

inline bool Tracker::Init()
{
    if(PXCSession_Create(session.ReleaseRef()) < PXC_STATUS_NO_ERROR || !session.IsValid()) return false;

    for(int i=0; ; ++i) // For valid capture contexts
    { 
        PXCSession::ImplDesc desc, filter = { PXCSession::IMPL_GROUP_SENSOR, PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE };
        if(session->QueryImpl(&filter, i, &desc) < PXC_STATUS_NO_ERROR) break;
        if(session->CreateImpl(&desc, PXCCapture::CUID, (void**)capture.ReleaseRef()) < PXC_STATUS_NO_ERROR || !capture.IsValid()) continue;

        for(int j=0; ; ++j) // For valid devices
        { 
            PXCCapture::DeviceInfo dinfo;
            if(capture->QueryDevice(j, &dinfo) < PXC_STATUS_NO_ERROR) break;
            if(capture->CreateDevice(j, device.ReleaseRef()) < PXC_STATUS_NO_ERROR || !device.IsValid()) continue;

            for(int k=0; ; ++k) // For valid video streams
            { 
                PXCCapture::Device::StreamInfo sinfo; 
                if(device->QueryStream(k, &sinfo) < PXC_STATUS_NO_ERROR) break;
                if(sinfo.cuid != PXCCapture::VideoStream::CUID || device->CreateStream(k, PXCCapture::VideoStream::CUID, (void**)stream.ReleaseRef()) < PXC_STATUS_NO_ERROR || !device.IsValid()) continue;

                for (int m=0; ; ++m) // For depth buffer profiles of at least 60 FPS
                { 
                    PXCCapture::VideoStream::ProfileInfo pinfo;
                    //if(stream->QueryProfile(m, &pinfo) < PXC_STATUS_NO_ERROR) break;
                    //if(pinfo.imageInfo.format != PXCImage::IMAGE_TYPE_DEPTH || pinfo.frameRateMin.numerator / pinfo.frameRateMin.denominator < 60 || stream->SetProfile(&pinfo) < PXC_STATUS_NO_ERROR) continue;


                    //Sleep(1000);
                    dimx=(int)K.m_colorWidth;
                    dimy=(int)K.m_colorHeight;
                    fovx=0.99;
                    fovy=0.75;


                    // If we can read at least one frame
                    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
                    if((sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR)||true)
                    {
                        // Obtain useful properties and reserve room for local copies of depth and color images
                        //dimx = pinfo.imageInfo.width; dimy = pinfo.imageInfo.height;
                        //PXCPointF32 flen; device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &flen);

                        //fovx = atan(dimx / (flen.x*2))*2; fovy = atan(dimy / (flen.y*2))*2;
                        color = new unsigned char[dimx*dimy*3];
                        depth = new unsigned short[dimx*dimy];

                        // Initialize tracking library
                        tracker = hsklCreateTracker(HSKL_COORDS_X_RIGHT_Y_DOWN_Z_FWD, HSKL_API_VERSION);
                        hsklSetSensorProperties(tracker, HSKL_SENSOR_IDEAL, dimx, dimy, fovx, fovy);
                        return true;
                    }
                }
                stream.ReleaseRef();
            }
            device.ReleaseRef();
        }
        capture.ReleaseRef();
    }
    return false;
}

}

这是原始的 Update() 函数：

// Consume the latest synchronized PXC frame: copy its depth plane into the
// local `depth` buffer, build a grayscale preview from the confidence plane,
// and hand both to the hand-tracking library. Always ends by kicking off the
// next asynchronous read so a frame is in flight for the next call.
inline void Tracker::Update() 
{ 
    if(sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR)
    {
        // Map the captured image for CPU reads; planes[0] is treated as depth,
        // planes[1] as per-pixel confidence.
        PXCImage::ImageData depthData; image->AcquireAccess(PXCImage::ACCESS_READ, &depthData);
        memcpy_s(depth, sizeof(unsigned short)*dimx*dimy, depthData.planes[0], sizeof(unsigned short)*dimx*dimy);
        const unsigned short * conf = reinterpret_cast<const unsigned short *>(depthData.planes[1]);
        // Replicate confidence into all three channels as a grayscale preview
        // (>>2 — presumably scales the sensor's value range down to 8 bits; confirm).
        for(int i=0; i<dimx*dimy; ++i) color[3*i+2] = color[3*i+1] = color[3*i] = conf[i]>>2; // Can we just use IR here?
        hsklTrackOneFrame(tracker, depth, conf); // Pass data to tracking library
        image->ReleaseAccess(&depthData);   
    }

    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
}

和我的:

inline void Tracker::Update() 
{ 
    if((sp && sp->Synchronize() >= PXC_STATUS_NO_ERROR)||true)
    {
        Mat matd(dimy,dimx,CV_16UC1);
        Mat matir(dimy,dimx,CV_16UC1);
        K.getDepth(&matd);
        matir = K.getIR();

        double min,max;
        cv::minMaxLoc(matir,&min,&max);
        Mat exit(dimy,dimx,CV_8UC1);
        matir.convertTo(exit,CV_8U,pow(2.0,8.0)/max);

        memcpy_s(depth,sizeof(unsigned short)*dimx*dimy,(unsigned short*)(matd.ptr()),sizeof(unsigned short)*dimx*dimy);
        const unsigned short * conf = reinterpret_cast<const unsigned short *>(matir.ptr());
        //PXCImage::ImageData depthData; image->AcquireAccess(PXCImage::ACCESS_READ, &depthData);
        //memcpy_s(depth, sizeof(unsigned short)*dimx*dimy, depthData.planes[0], sizeof(unsigned short)*dimx*dimy);
        //const unsigned short * conf = reinterpret_cast<const unsigned short *>(depthData.planes[1]);
    // Can we just use IR here?
        for(int i=0; i<dimx*dimy; ++i) {
            color[3*i+2] = color[3*i+1] = color[3*i] = (exit.at<unsigned char>(i)) ;


        }
        hsklTrackOneFrame(tracker, depth, conf); // Pass data to tracking library
        //image->ReleaseAccess(&depthData); 
    }

    stream->ReadStreamAsync(image.ReleaseRef(), sp.ReleaseRef());
}

程序运行时没有任何报错……但它无法跟踪我的手。有什么建议吗？或者有人有兴趣，愿意和我一起做这个项目？谢谢。

0 个答案:

没有答案