Kinect Development — Aligning Skeleton Data with the Color and Depth Images
- When displaying the color and depth streams, it is best to render into a WriteableBitmap object, so one reusable bitmap is updated per frame instead of a new image being allocated each time.
- To overlay the skeleton image on the depth image (or on the color image), first pin down the resolution and size of the depth image. For convenience, both the depth stream and the color stream here use the 640x480 @ 30 fps format, and the Grid used for drawing is likewise sized to 640x480.
- To overlay skeleton data on the depth image, each joint must be converted into the depth image's coordinate space, which is a single call to MapSkeletonPointToDepthPoint; to overlay the skeleton on the color image instead, call MapSkeletonPointToColorPoint. A sketch of both calls follows this list.
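A minimal sketch of the two mapping calls, assuming a started KinectSensor (called sensor here) whose depth and color streams are already enabled; the class and method names are illustrative, not part of the SDK:

using Microsoft.Kinect;

static class JointMappingSketch
{
    // Maps a single tracked joint into depth-image and color-image pixel coordinates.
    public static void MapJoint(KinectSensor sensor, Joint joint)
    {
        CoordinateMapper mapper = sensor.CoordinateMapper;

        // Skeleton space -> depth-image space (pixels in the 640x480 depth frame)
        DepthImagePoint depthPoint = mapper.MapSkeletonPointToDepthPoint(
            joint.Position, sensor.DepthStream.Format);

        // Skeleton space -> color-image space (pixels in the 640x480 color frame)
        ColorImagePoint colorPoint = mapper.MapSkeletonPointToColorPoint(
            joint.Position, sensor.ColorStream.Format);

        // depthPoint.X / depthPoint.Y (or colorPoint.X / colorPoint.Y) can now be used
        // directly as drawing coordinates on an overlay the same size as that image.
    }
}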
Front-end code (XAML)
The UI is simple: under the root Grid there are two Image controls and a nested Grid. The two Image controls display the color stream and the depth stream respectively, and the nested Grid — referenced as LayoutRoot in the code-behind — is where the skeleton is drawn. Note that this Grid's Background must be set to Transparent, so that the skeleton drawn on it does not block the color or depth image underneath. A sketch of this markup is shown below.
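A rough sketch of what this markup might look like. The element names ColorImage, DepthImage and LayoutRoot come from the code-behind below; everything else (the x:Class namespace, window title and sizes) is an assumption for illustration:

<Window x:Class="KinectSkeletonOverlay.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        Title="Skeleton Overlay" Width="662" Height="520">
    <Grid Width="640" Height="480">
        <!-- Color stream -->
        <Image x:Name="ColorImage" Width="640" Height="480" />
        <!-- Depth stream; the two Images overlap, so collapse one to see the other -->
        <Image x:Name="DepthImage" Width="640" Height="480" />
        <!-- Transparent overlay onto which the skeleton polylines are drawn -->
        <Grid x:Name="LayoutRoot" Background="Transparent" Width="640" Height="480" />
    </Grid>
</Window>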
Code-behind
using System;
using System.Linq;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
using Microsoft.Kinect;

public partial class MainWindow : Window
{
    private KinectSensor kinectDevice;
    private readonly Brush[] skeletonBrushes;      // brushes used to draw the tracked skeletons
    private WriteableBitmap depthImageBitMap;
    private Int32Rect depthImageBitmapRect;
    private Int32 depthImageStride;
    private DepthImageFrame lastDepthFrame;
    private WriteableBitmap colorImageBitmap;
    private Int32Rect colorImageBitmapRect;
    private int colorImageStride;
    private byte[] colorImagePixelData;
    private Skeleton[] frameSkeletons;

    public MainWindow()
    {
        InitializeComponent();
        skeletonBrushes = new Brush[] { Brushes.Red };
        KinectSensor.KinectSensors.StatusChanged += KinectSensors_StatusChanged;
        this.KinectDevice = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
    }

    public KinectSensor KinectDevice
    {
        get { return this.kinectDevice; }
        set
        {
            if (this.kinectDevice != value)
            {
                // Uninitialize the previous sensor
                if (this.kinectDevice != null)
                {
                    this.kinectDevice.Stop();
                    this.kinectDevice.SkeletonFrameReady -= kinectDevice_SkeletonFrameReady;
                    this.kinectDevice.ColorFrameReady -= kinectDevice_ColorFrameReady;
                    this.kinectDevice.DepthFrameReady -= kinectDevice_DepthFrameReady;
                    this.kinectDevice.SkeletonStream.Disable();
                    this.kinectDevice.DepthStream.Disable();
                    this.kinectDevice.ColorStream.Disable();
                    this.frameSkeletons = null;
                }

                this.kinectDevice = value;

                // Initialize the new sensor
                if (this.kinectDevice != null)
                {
                    if (this.kinectDevice.Status == KinectStatus.Connected)
                    {
                        this.kinectDevice.SkeletonStream.Enable();
                        this.kinectDevice.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                        this.kinectDevice.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                        this.frameSkeletons = new Skeleton[this.kinectDevice.SkeletonStream.FrameSkeletonArrayLength];
                        this.kinectDevice.SkeletonFrameReady += kinectDevice_SkeletonFrameReady;
                        this.kinectDevice.ColorFrameReady += kinectDevice_ColorFrameReady;
                        this.kinectDevice.DepthFrameReady += kinectDevice_DepthFrameReady;
                        this.kinectDevice.Start();

                        // Reusable bitmap for the depth stream (96 dpi; Gray16 matches the raw 16-bit depth data)
                        DepthImageStream depthStream = kinectDevice.DepthStream;
                        depthImageBitMap = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight,
                            96, 96, PixelFormats.Gray16, null);
                        depthImageBitmapRect = new Int32Rect(0, 0, depthStream.FrameWidth, depthStream.FrameHeight);
                        depthImageStride = depthStream.FrameWidth * depthStream.FrameBytesPerPixel;

                        // Reusable bitmap for the color stream (Bgr32 matches the RGB format)
                        ColorImageStream colorStream = kinectDevice.ColorStream;
                        colorImageBitmap = new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight,
                            96, 96, PixelFormats.Bgr32, null);
                        this.colorImageBitmapRect = new Int32Rect(0, 0, colorStream.FrameWidth, colorStream.FrameHeight);
                        this.colorImageStride = colorStream.FrameWidth * colorStream.FrameBytesPerPixel;

                        ColorImage.Source = this.colorImageBitmap;
                        DepthImage.Source = depthImageBitMap;
                    }
                }
            }
        }
    }

    void kinectDevice_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
    {
        using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
        {
            if (depthFrame != null)
            {
                // Copy the raw 16-bit depth pixels into the reusable depth bitmap
                short[] depthPixelData = new short[depthFrame.PixelDataLength];
                depthFrame.CopyPixelDataTo(depthPixelData);
                depthImageBitMap.WritePixels(depthImageBitmapRect, depthPixelData, depthImageStride, 0);
            }
        }
    }

    void kinectDevice_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
    {
        using (ColorImageFrame frame = e.OpenColorImageFrame())
        {
            if (frame != null)
            {
                // Copy the 32-bit color pixels into the reusable color bitmap
                byte[] pixelData = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixelData);
                this.colorImageBitmap.WritePixels(this.colorImageBitmapRect, pixelData, this.colorImageStride, 0);
            }
        }
    }

    void kinectDevice_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
    {
        using (SkeletonFrame frame = e.OpenSkeletonFrame())
        {
            if (frame != null)
            {
                Polyline figure;
                Brush userBrush;
                Skeleton skeleton;

                // Remove the previous frame's polylines before drawing the new ones
                LayoutRoot.Children.Clear();
                frame.CopySkeletonDataTo(this.frameSkeletons);

                for (int i = 0; i < this.frameSkeletons.Length; i++)
                {
                    skeleton = this.frameSkeletons[i];
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        userBrush = this.skeletonBrushes[i % this.skeletonBrushes.Length];

                        // Draw the head and torso
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.Head, JointType.ShoulderCenter, JointType.ShoulderLeft, JointType.Spine,
                            JointType.ShoulderRight, JointType.ShoulderCenter, JointType.HipCenter });
                        LayoutRoot.Children.Add(figure);

                        // Draw the hips
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.HipLeft, JointType.HipRight });
                        LayoutRoot.Children.Add(figure);

                        // Draw the left leg
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.HipCenter, JointType.HipLeft, JointType.KneeLeft, JointType.AnkleLeft, JointType.FootLeft });
                        LayoutRoot.Children.Add(figure);

                        // Draw the right leg
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.HipCenter, JointType.HipRight, JointType.KneeRight, JointType.AnkleRight, JointType.FootRight });
                        LayoutRoot.Children.Add(figure);

                        // Draw the left arm
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.ShoulderLeft, JointType.ElbowLeft, JointType.WristLeft, JointType.HandLeft });
                        LayoutRoot.Children.Add(figure);

                        // Draw the right arm
                        figure = CreateFigure(skeleton, userBrush, new[] { JointType.ShoulderRight, JointType.ElbowRight, JointType.WristRight, JointType.HandRight });
                        LayoutRoot.Children.Add(figure);
                    }
                }
            }
        }
    }

    private Polyline CreateFigure(Skeleton skeleton, Brush brush, JointType[] joints)
    {
        Polyline figure = new Polyline();
        figure.StrokeThickness = 8;        // stroke width in pixels; 8 is a typical choice
        figure.Stroke = brush;

        for (int i = 0; i < joints.Length; i++)
        {
            figure.Points.Add(GetJointPoint(skeleton.Joints[joints[i]]));
        }

        return figure;
    }

    private Point GetJointPoint(Joint joint)
    {
        // Map the joint from skeleton space into depth-image space
        CoordinateMapper cm = new CoordinateMapper(kinectDevice);
        DepthImagePoint point = cm.MapSkeletonPointToDepthPoint(joint.Position, this.KinectDevice.DepthStream.Format);
        // To overlay onto the color image instead, map into color-image space:
        //ColorImagePoint point = cm.MapSkeletonPointToColorPoint(joint.Position, this.KinectDevice.ColorStream.Format);

        // Scale from image coordinates to the overlay Grid's size (1:1 here, since both are 640x480)
        point.X *= (int)this.LayoutRoot.ActualWidth / KinectDevice.DepthStream.FrameWidth;
        point.Y *= (int)this.LayoutRoot.ActualHeight / KinectDevice.DepthStream.FrameHeight;

        return new Point(point.X, point.Y);
    }

    private void KinectSensors_StatusChanged(object sender, StatusChangedEventArgs e)
    {
        switch (e.Status)
        {
            case KinectStatus.Initializing:
            case KinectStatus.Connected:
            case KinectStatus.NotPowered:
            case KinectStatus.NotReady:
            case KinectStatus.DeviceNotGenuine:
                this.KinectDevice = e.Sensor;
                break;
            case KinectStatus.Disconnected:
                //TODO: Give the user feedback to plug-in a Kinect device.
                this.KinectDevice = null;
                break;
            default:
                //TODO: Show an error state
                break;
        }
    }
}
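One detail worth noting in GetJointPoint: the mapped coordinates are scaled to the overlay Grid with integer division, which only works out cleanly because LayoutRoot is exactly 640x480, the same size as the depth frame. With any other overlay size the integer ratio would truncate (for example 800 / 640 evaluates to 1). A sketch of the same method with floating-point scaling, which also reuses the sensor's own CoordinateMapper (available since SDK 1.6) instead of constructing a new one per joint:

private Point GetJointPoint(Joint joint)
{
    // Reuse the mapper owned by the sensor instead of allocating one per joint
    CoordinateMapper cm = this.KinectDevice.CoordinateMapper;
    DepthImagePoint point = cm.MapSkeletonPointToDepthPoint(joint.Position, this.KinectDevice.DepthStream.Format);

    // Floating-point scaling avoids truncation when the overlay and the depth frame differ in size
    double x = point.X * (this.LayoutRoot.ActualWidth / this.KinectDevice.DepthStream.FrameWidth);
    double y = point.Y * (this.LayoutRoot.ActualHeight / this.KinectDevice.DepthStream.FrameHeight);

    return new Point(x, y);
}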