• When displaying the color and depth streams, it is best to use WriteableBitmap objects;
  • To overlay the skeleton data on the depth image (or on the color image), first determine the resolution and size of the depth image. For convenience, both the depth stream and the color stream here use the 640x480Fps30 format, and the Grid is likewise sized to 640×480.
  • To overlay the skeleton data on the depth image, the joint points must be mapped into the depth image's coordinate space, which can be done by calling MapSkeletonPointToDepthPoint directly; to overlay the skeleton on the color image instead, simply call MapSkeletonPointToColorPoint (see the sketch after this list).
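
A minimal sketch of the two mapping calls, assuming a started KinectSensor named sensor with its color and depth streams enabled, and a tracked Joint named joint (both names are placeholders for illustration only):

CoordinateMapper mapper = new CoordinateMapper(sensor);

// Skeleton point -> depth-image coordinates, for overlaying on the depth image.
DepthImagePoint depthPoint = mapper.MapSkeletonPointToDepthPoint(joint.Position, sensor.DepthStream.Format);

// Skeleton point -> color-image coordinates, for overlaying on the color image.
ColorImagePoint colorPoint = mapper.MapSkeletonPointToColorPoint(joint.Position, sensor.ColorStream.Format);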

Front-end code

The UI is simple: under the root Grid there are two Image objects and a nested Grid. The two Image objects display the color image data and the depth image data respectively, and the nested Grid is the surface on which the skeleton is drawn. Note that the nested Grid's Background must be set to Transparent, so that the skeleton can be drawn on it without hiding the color or depth image underneath.
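
The post does not reproduce the XAML itself, so the markup below is a minimal sketch of the layout just described, using the element names the code-behind refers to (ColorImage, DepthImage, LayoutRoot); the x:Class namespace, window title, and window size are illustrative assumptions:

<Window x:Class="KinectOverlayDemo.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        Title="Skeleton Overlay" Width="660" Height="520">
    <Grid>
        <!-- Color and depth images, sized to match the 640x480 stream format -->
        <Image x:Name="ColorImage" Width="640" Height="480"/>
        <Image x:Name="DepthImage" Width="640" Height="480"/>
        <!-- Skeleton drawing surface; a transparent Background keeps the images underneath visible -->
        <Grid x:Name="LayoutRoot" Width="640" Height="480" Background="Transparent"/>
    </Grid>
</Window>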


Code-behind

using System;
using System.Linq;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
using Microsoft.Kinect;

public partial class MainWindow : Window
{
    private KinectSensor kinectDevice;
    private readonly Brush[] skeletonBrushes;    // brushes used to draw the skeletons

    private WriteableBitmap depthImageBitMap;
    private Int32Rect depthImageBitmapRect;
    private Int32 depthImageStride;
    private DepthImageFrame lastDepthFrame;

    private WriteableBitmap colorImageBitmap;
    private Int32Rect colorImageBitmapRect;
    private int colorImageStride;
    private byte[] colorImagePixelData;

    private Skeleton[] frameSkeletons;

    public MainWindow()
    {
        InitializeComponent();

        skeletonBrushes = new Brush[] { Brushes.Red };

        KinectSensor.KinectSensors.StatusChanged += KinectSensors_StatusChanged;
        this.KinectDevice = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
    }

    public KinectSensor KinectDevice
    {
        get { return this.kinectDevice; }
        set
        {
            if (this.kinectDevice != value)
            {
                // Uninitialize
                if (this.kinectDevice != null)
                {
                    this.kinectDevice.Stop();
                    this.kinectDevice.SkeletonFrameReady -= kinectDevice_SkeletonFrameReady;
                    this.kinectDevice.ColorFrameReady -= kinectDevice_ColorFrameReady;
                    this.kinectDevice.DepthFrameReady -= kinectDevice_DepthFrameReady;
                    this.kinectDevice.SkeletonStream.Disable();
                    this.kinectDevice.DepthStream.Disable();
                    this.kinectDevice.ColorStream.Disable();
                    this.frameSkeletons = null;
                }

                this.kinectDevice = value;

                // Initialize
                if (this.kinectDevice != null)
                {
                    if (this.kinectDevice.Status == KinectStatus.Connected)
                    {
                        this.kinectDevice.SkeletonStream.Enable();
                        this.kinectDevice.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                        this.kinectDevice.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                        this.frameSkeletons = new Skeleton[this.kinectDevice.SkeletonStream.FrameSkeletonArrayLength];
                        this.kinectDevice.SkeletonFrameReady += kinectDevice_SkeletonFrameReady;
                        this.kinectDevice.ColorFrameReady += kinectDevice_ColorFrameReady;
                        this.kinectDevice.DepthFrameReady += kinectDevice_DepthFrameReady;
                        this.kinectDevice.Start();

                        // Prepare the WriteableBitmap that the depth frames will be written into.
                        DepthImageStream depthStream = kinectDevice.DepthStream;
                        depthImageBitMap = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight,
                            96, 96, PixelFormats.Gray16, null);
                        depthImageBitmapRect = new Int32Rect(0, 0, depthStream.FrameWidth, depthStream.FrameHeight);
                        depthImageStride = depthStream.FrameWidth * depthStream.FrameBytesPerPixel;

                        // Prepare the WriteableBitmap that the color frames will be written into.
                        ColorImageStream colorStream = kinectDevice.ColorStream;
                        colorImageBitmap = new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight,
                            96, 96, PixelFormats.Bgr32, null);
                        this.colorImageBitmapRect = new Int32Rect(0, 0, colorStream.FrameWidth, colorStream.FrameHeight);
                        this.colorImageStride = colorStream.FrameWidth * colorStream.FrameBytesPerPixel;

                        ColorImage.Source = this.colorImageBitmap;
                        DepthImage.Source = depthImageBitMap;
                    }
                }
            }
        }
    }

    void kinectDevice_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
    {
        using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
        {
            if (depthFrame != null)
            {
                short[] depthPixelData = new short[depthFrame.PixelDataLength];
                depthFrame.CopyPixelDataTo(depthPixelData);
                depthImageBitMap.WritePixels(depthImageBitmapRect, depthPixelData, depthImageStride, 0);
            }
        }
    }

    void kinectDevice_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
    {
        using (ColorImageFrame frame = e.OpenColorImageFrame())
        {
            if (frame != null)
            {
                byte[] pixelData = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixelData);
                this.colorImageBitmap.WritePixels(this.colorImageBitmapRect, pixelData, this.colorImageStride, 0);
            }
        }
    }

    void kinectDevice_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
    {
        using (SkeletonFrame frame = e.OpenSkeletonFrame())
        {
            if (frame != null)
            {
                Polyline figure;
                Brush userBrush;
                Skeleton skeleton;

                LayoutRoot.Children.Clear();
                frame.CopySkeletonDataTo(this.frameSkeletons);

                for (int i = 0; i < this.frameSkeletons.Length; i++)
                {
                    skeleton = this.frameSkeletons[i];

                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        userBrush = this.skeletonBrushes[i % this.skeletonBrushes.Length];

                        // Draw head and torso
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.Head, JointType.ShoulderCenter, JointType.ShoulderLeft, JointType.Spine,
                            JointType.ShoulderRight, JointType.ShoulderCenter, JointType.HipCenter });
                        LayoutRoot.Children.Add(figure);

                        // Draw hips
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.HipLeft, JointType.HipRight });
                        LayoutRoot.Children.Add(figure);

                        // Draw left leg
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.HipCenter, JointType.HipLeft, JointType.KneeLeft, JointType.AnkleLeft, JointType.FootLeft });
                        LayoutRoot.Children.Add(figure);

                        // Draw right leg
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.HipCenter, JointType.HipRight, JointType.KneeRight, JointType.AnkleRight, JointType.FootRight });
                        LayoutRoot.Children.Add(figure);

                        // Draw left arm
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.ShoulderLeft, JointType.ElbowLeft, JointType.WristLeft, JointType.HandLeft });
                        LayoutRoot.Children.Add(figure);

                        // Draw right arm
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.ShoulderRight, JointType.ElbowRight, JointType.WristRight, JointType.HandRight });
                        LayoutRoot.Children.Add(figure);
                    }
                }
            }
        }
    }

    private Polyline CreateFigure(Skeleton skeleton, Brush brush, JointType[] joints)
    {
        Polyline figure = new Polyline();
        figure.StrokeThickness = 8;
        figure.Stroke = brush;

        for (int i = 0; i < joints.Length; i++)
        {
            figure.Points.Add(GetJointPoint(skeleton.Joints[joints[i]]));
        }

        return figure;
    }

    private Point GetJointPoint(Joint joint)
    {
        CoordinateMapper cm = new CoordinateMapper(kinectDevice);

        // Map the joint into depth-image space; switch to the commented line to overlay on the color image instead.
        DepthImagePoint point = cm.MapSkeletonPointToDepthPoint(joint.Position, this.KinectDevice.DepthStream.Format);
        //ColorImagePoint point = cm.MapSkeletonPointToColorPoint(joint.Position, this.KinectDevice.ColorStream.Format);

        // Scale from depth-image coordinates to the drawing surface.
        point.X *= (int)this.LayoutRoot.ActualWidth / KinectDevice.DepthStream.FrameWidth;
        point.Y *= (int)this.LayoutRoot.ActualHeight / KinectDevice.DepthStream.FrameHeight;

        return new Point(point.X, point.Y);
    }

    private void KinectSensors_StatusChanged(object sender, StatusChangedEventArgs e)
    {
        switch (e.Status)
        {
            case KinectStatus.Initializing:
            case KinectStatus.Connected:
            case KinectStatus.NotPowered:
            case KinectStatus.NotReady:
            case KinectStatus.DeviceNotGenuine:
                this.KinectDevice = e.Sensor;
                break;
            case KinectStatus.Disconnected:
                //TODO: Give the user feedback to plug in a Kinect device.
                this.KinectDevice = null;
                break;
            default:
                //TODO: Show an error state
                break;
        }
    }
}
