Kinect - detecting when a human exits the frame

Asked: 2012-01-11 21:40:31

Tags: c# kinect

What I'm trying to do is take the Kinect Skeletal Sample and save x pictures, but only while a person is present. I've got it working, except that once it detects a human it only records x pictures, even if the person then leaves the Kinect's field of view. Does anyone know how to make it so that recording starts once a person enters the frame and stops once they leave?

Variables

    Runtime nui;
    int totalFrames = 0;
    int totalFrames2 = 0;
    int lastFrames = 0;
    int lastFrameWithMotion = 0;
    int stopFrameNumber = 100;
    DateTime lastTime = DateTime.MaxValue;

Entering/exiting the frame

    void nui_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
    {
        SkeletonFrame skeletonFrame = e.SkeletonFrame;

        int iSkeleton = 0;
        ++totalFrames;
        string bb1 = Convert.ToString(totalFrames);
        //Uri uri1 = new Uri("C:\\Research\\Kinect\\Proposal_Skeleton\\Skeleton_Img" + bb1 + ".png");
        Uri uri1 = new Uri("C:\\temp\\Skeleton_Img" + bb1 + ".png");
        // string file_name_3 = "C:\\Research\\Kinect\\Proposal_Skeleton\\Skeleton_Img" + bb1 + ".png";    // xxx 
        Brush[] brushes = new Brush[6];
        brushes[0] = new SolidColorBrush(Color.FromRgb(255, 0, 0));
        brushes[1] = new SolidColorBrush(Color.FromRgb(0, 255, 0));
        brushes[2] = new SolidColorBrush(Color.FromRgb(64, 255, 255));
        brushes[3] = new SolidColorBrush(Color.FromRgb(255, 255, 64));
        brushes[4] = new SolidColorBrush(Color.FromRgb(255, 64, 255));
        brushes[5] = new SolidColorBrush(Color.FromRgb(128, 128, 255));

        skeleton.Children.Clear();
        //byte[] skeletonFrame32 = new byte[(int)(skeleton.Width) * (int)(skeleton.Height) * 4];
        foreach (SkeletonData data in skeletonFrame.Skeletons)
        {
            if (SkeletonTrackingState.Tracked == data.TrackingState)
            {
                // Draw bones
                Brush brush = brushes[iSkeleton % brushes.Length];
                skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.HipCenter, JointID.Spine, JointID.ShoulderCenter, JointID.Head));
                skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.ShoulderCenter, JointID.ShoulderLeft, JointID.ElbowLeft, JointID.WristLeft, JointID.HandLeft));
                skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.ShoulderCenter, JointID.ShoulderRight, JointID.ElbowRight, JointID.WristRight, JointID.HandRight));
                skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.HipCenter, JointID.HipLeft, JointID.KneeLeft, JointID.AnkleLeft, JointID.FootLeft));
                skeleton.Children.Add(getBodySegment(data.Joints, brush, JointID.HipCenter, JointID.HipRight, JointID.KneeRight, JointID.AnkleRight, JointID.FootRight));

                // Draw joints
                // try to add a comment, xxx
                foreach (Joint joint in data.Joints)
                {
                    Point jointPos = getDisplayPosition(joint);
                    Line jointLine = new Line();
                    jointLine.X1 = jointPos.X - 3;
                    jointLine.X2 = jointLine.X1 + 6;
                    jointLine.Y1 = jointLine.Y2 = jointPos.Y;
                    jointLine.Stroke = jointColors[joint.ID];
                    jointLine.StrokeThickness = 6;
                    skeleton.Children.Add(jointLine);
                }
                //       ExportToPng(uri1, skeleton);
               // SoundPlayerAction Source = "C:/LiamScienceFair/muhaha.wav";
                //SoundPlayer player1 = new SoundPlayer("muhaha.wav")
               //  player1.Play(); 
              // MediaPlayer.
               // axWindowsMediaPlayer1.currentPlaylist = axWindowsMediaPlayer1.mediaCollection.getByName("mediafile");


                nui.VideoFrameReady += new EventHandler<ImageFrameReadyEventArgs>(nui_ColorFrameReady2);



            }


            iSkeleton++;
        } // for each skeleton
    }

Actual code

    void nui_ColorFrameReady2(object sender, ImageFrameReadyEventArgs e)
    {
        // 32-bit per pixel, RGBA image  xxx
        PlanarImage Image = e.ImageFrame.Image;
        int deltaFrames = totalFrames - lastFrameWithMotion;
        if (totalFrames2 <= stopFrameNumber & deltaFrames > 300)
        {
            ++totalFrames2;
            string bb1 = Convert.ToString(totalFrames2);
            // string file_name_3 = "C:\\Research\\Kinect\\Proposal\\Depth_Img" + bb1 + ".jpg"; xxx
            string file_name_4 = "C:\\temp\\Video2_Img" + bb1 + ".jpg";
            video.Source = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);

            BitmapSource image4 = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
            image4.Save(file_name_4, Coding4Fun.Kinect.Wpf.ImageFormat.Jpeg);
            if (totalFrames2 == stopFrameNumber)
            {
                lastFrameWithMotion = totalFrames;
                stopFrameNumber += 100;
            }
        }

    }

2 answers:

Answer 0 (score: 3):

In most of the setups I've used, the skeletal tracking event handler includes a check for (skeleton != null). All you need to do is reset your trigger once you receive a null skeleton.

The SDK sends a skeleton every time the event fires, so...

    if (skeleton != null)
    {
        // do image taking here
    }
    else
    {
        // reset image counter
    }
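
Since the Beta SDK used in the question exposes an array of skeletons rather than a single nullable one, one way to apply the same idea is to note whether any skeleton in the current frame is tracked and reset the capture counter when none is. This is only a sketch, assuming the totalFrames2 counter and the Beta-SDK types (SkeletonData, SkeletonTrackingState) from the code posted in the question:

    void nui_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
    {
        SkeletonFrame skeletonFrame = e.SkeletonFrame;
        bool anyTracked = false;

        foreach (SkeletonData data in skeletonFrame.Skeletons)
        {
            if (SkeletonTrackingState.Tracked == data.TrackingState)
            {
                anyTracked = true;
                // ... existing drawing / capture-triggering code ...
            }
        }

        if (!anyTracked)
        {
            // Nobody in view: reset the counter so capturing can restart
            // the next time a person enters the frame.
            totalFrames2 = 0;
        }
    }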

Answer 1 (score: 2):

I would try something like this. Create a bool class variable named SkeletonInFrame and initialize it to false. Every time SkeletonFrameReady fires, set this bool to true. When you process a color frame, only do the processing if this variable is true, and then set it back to false afterwards. This should help you stop processing frames once you are no longer receiving skeleton events.
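
A minimal sketch of that approach, reusing the handler names and Beta-SDK event types from the question (only the SkeletonInFrame flag is new; the drawing and image-saving code is assumed to stay as posted):

    bool SkeletonInFrame = false;   // new class-level flag, initially false

    void nui_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
    {
        // ... existing skeleton drawing code ...
        SkeletonInFrame = true;     // a skeleton event arrived, so someone is in view
    }

    void nui_ColorFrameReady2(object sender, ImageFrameReadyEventArgs e)
    {
        if (!SkeletonInFrame)
            return;                 // no skeleton since the last color frame: skip saving

        // ... existing image-saving code ...

        SkeletonInFrame = false;    // require a fresh skeleton event before saving again
    }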