Kinect depth-to-color image alignment program
//#include "duiqi.hpp"
#include "kinect.h"
#include <iostream>
#include "opencv2/opencv.hpp"
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <fstream>
using namespace cv;
using namespace std;
Mat depthFilter(UINT16 *depthData);
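// Release a COM interface pointer and set it to NULL (standard Kinect SDK helper).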
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    if (pInterfaceToRelease != NULL)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = NULL;
    }
}
UINT16 uDepthMin = 0, uDepthMax = 0;
int main()
{
    IKinectSensor* m_pKinectSensor;
    HRESULT hr;
    ICoordinateMapper* m_pCoordinateMapper = NULL;
    CameraIntrinsics* m_pCameraIntrinsics = new CameraIntrinsics();
    hr = GetDefaultKinectSensor(&m_pKinectSensor);
    if (FAILED(hr))
    {
        return hr;
    }
    IMultiSourceFrameReader* m_pMultiFrameReader = NULL;
    if (m_pKinectSensor)
    {
        hr = m_pKinectSensor->Open();
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->OpenMultiSourceFrameReader(
                FrameSourceTypes::FrameSourceTypes_Color |
                FrameSourceTypes::FrameSourceTypes_Infrared |
                FrameSourceTypes::FrameSourceTypes_Depth,
                &m_pMultiFrameReader);
        }
    }
    if (SUCCEEDED(hr))
    {
        hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
    }
    if (!m_pKinectSensor || FAILED(hr))
    {
        return E_FAIL;
    }
    IMultiSourceFrame* m_pMultiFrame = nullptr;
    IDepthFrameReference* m_pDepthFrameReference = NULL;
    IColorFrameReference* m_pColorFrameReference = NULL;
    IInfraredFrameReference* m_pInfraredFrameReference = NULL;
    IInfraredFrame* m_pInfraredFrame = NULL;
    IDepthFrame* m_pDepthFrame = NULL;
    IColorFrame* m_pColorFrame = NULL;
    Mat rgb(1080, 1920, CV_8UC4);
    Mat rgb_resize(540, 960, CV_8UC4);
    DepthSpacePoint* m_pDepthCoordinates = NULL;
    ColorSpacePoint* m_pColorCoordinates = NULL;
    CameraSpacePoint* m_pCameraCoordinates = NULL;
    m_pColorCoordinates = new ColorSpacePoint[512 * 424];
    m_pCameraCoordinates = new CameraSpacePoint[512 * 424];
    UINT16* depthData = new UINT16[424 * 512];
    Mat depth(424, 512, CV_16UC1);
    Mat depth8U(424, 512, CV_8U);
    vector<DepthSpacePoint> depthSpacePoints(1920 * 1080);
    Mat CoordinateMapperMat(1080, 1520, CV_8U);
    Mat CoordinateMapperMat_resize(540, 960, CV_8U);
    int savecount = 0;
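    // Per-frame pipeline: acquire a multi-source frame, copy the color and depth
    // buffers, hole-fill the raw depth, map every color pixel into depth space,
    // and save the cropped color image together with the aligned depth image.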
    while (true)
    {
        hr = m_pMultiFrameReader->AcquireLatestFrame(&m_pMultiFrame);
        if (FAILED(hr) || !m_pMultiFrame)
        {
            continue;
        }
        if (SUCCEEDED(hr))
            hr = m_pMultiFrame->get_ColorFrameReference(&m_pColorFrameReference);
        if (SUCCEEDED(hr))
            hr = m_pColorFrameReference->AcquireFrame(&m_pColorFrame);
        if (SUCCEEDED(hr))
            hr = m_pMultiFrame->get_DepthFrameReference(&m_pDepthFrameReference);
        if (SUCCEEDED(hr))
            hr = m_pDepthFrameReference->AcquireFrame(&m_pDepthFrame);
        /*m_pDepthFrame->get_DepthMinReliableDistance(&uDepthMin);
        m_pDepthFrame->get_DepthMaxReliableDistance(&uDepthMax);
        cout << "Reliable Distance: " << uDepthMin << " - " << uDepthMax << endl;*/
        savecount++;
        cout << savecount << endl;
        ostringstream savecountstr;
        savecountstr << savecount;
        UINT nColorBufferSize = 1920 * 1080 * 4;
        if (SUCCEEDED(hr))
        {
            hr = m_pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(rgb.data), ColorImageFormat::ColorImageFormat_Bgra);
            Rect rect(200, 0, 1520, 1080);
            Mat rgb_roi = rgb(rect);
            resize(rgb_roi, rgb_resize, Size(), 0.4, 0.4);
            imshow("color_resize", rgb_resize);
            imwrite("D:/file/hust/ARcodes/ARKinect对齐181107/ARKinect/save6/color/" + savecountstr.str() + ".png", rgb_resize);
        }
        UINT nDepthBufferSize = 424 * 512;
        if (SUCCEEDED(hr))
        {
            hr = m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depthData));
            m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depth.data));
            // Convert the 16-bit depth to 8-bit for display.
            depth.convertTo(depth8U, CV_8U, 255.0f / 4500.0f);
            imshow("depth", depth8U);
        }
        // Hole-fill the 16-bit depth data.
        Mat filterDepth = depthFilter(depthData);
        Mat filterDepth8U;
        filterDepth.convertTo(filterDepth8U, CV_8U, 255.0f / 4500.0f);
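        // MapColorFrameToDepthSpace computes, for every pixel of the 1920x1080 color
        // frame, the corresponding coordinate in the 512x424 depth frame.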
        if (SUCCEEDED(hr))
        {
            hr = m_pCoordinateMapper->MapColorFrameToDepthSpace(424 * 512, reinterpret_cast<UINT16*>(filterDepth.data), 1920 * 1080, &depthSpacePoints[0]);
        }
        if (SUCCEEDED(hr))
        {
            CoordinateMapperMat = Scalar(0, 0, 0, 0); // clear the aligned map (declared above as Mat(1080, 1520, CV_8U))
            for (int y = 0; y < 1080; y++)
            {
                for (int x = 200; x < 1720; x++)
                //for (int x = 0; x < 1920; x++)
                {
                    unsigned int index = y * 1920 + x;
                    DepthSpacePoint p = depthSpacePoints[index];
                    if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity())
                    {
                        int depthX = static_cast<int>(p.X + 0.2f);
                        int depthY = static_cast<int>(p.Y + 0.2f);
                        if ((depthX >= 0) && (depthX < 512) && (depthY >= 0) && (depthY < 424))
                        {
                            CoordinateMapperMat.at<uchar>(y, x - 200) = filterDepth8U.at<uchar>(depthY, depthX);
                        }
                    }
                }
            }
            resize(CoordinateMapperMat, CoordinateMapperMat_resize, Size(), 0.4, 0.4);
            imshow("CoordinateMapper", CoordinateMapperMat_resize);
            imwrite("D:/file/hust/ARcodes/ARKinect对齐181107/ARKinect/save6/result/" + savecountstr.str() + ".png", CoordinateMapperMat_resize);
        }
        int c = waitKey(1);
        if ((char)c == VK_ESCAPE)
            break;
        SafeRelease(m_pColorFrame);
        SafeRelease(m_pDepthFrame);
        SafeRelease(m_pColorFrameReference);
        SafeRelease(m_pDepthFrameReference);
        SafeRelease(m_pMultiFrame);
    }
    cv::destroyAllWindows();
    SafeRelease(m_pCoordinateMapper);
    m_pKinectSensor->Close();
    std::system("pause");
    return 0;
}
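// depthFilter fills the zero ("hole") pixels that the depth sensor could not measure.
// For every zero pixel it scans the surrounding 5x5 window, tallies the non-zero
// neighbors in the inner 3x3 band and the outer band separately, and if enough valid
// neighbors exist it replaces the hole with the most frequent neighboring depth value;
// otherwise the pixel stays zero.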
Mat depthFilter(UINT16 *depthData) {
    Mat i_before(424, 512, CV_8UC4);
    Mat i_after(424, 512, CV_8UC4);
    Mat i_result(424, 512, CV_16UC1);
    cv::Mat i_result8U;
    unsigned short maxDepth = 0;
    unsigned short iZeroCountBefore = 0;
    unsigned short iZeroCountAfter = 0;
    unsigned short* depthArray = (unsigned short*)depthData;
    // Build an RGBA visualization of the raw depth (holes marked by setting the first
    // channel to 255) and count the zero pixels before filtering; kept for debugging.
    for (int i = 0; i < 512 * 424; i++)
    {
        int row = i / 512;
        int col = i % 512;
        unsigned short depthValue = depthArray[row * 512 + col];
        if (depthValue == 0)
        {
            i_before.data[i * 4] = 255;
            i_before.data[i * 4 + 1] = 0;
            i_before.data[i * 4 + 2] = 0;
            i_before.data[i * 4 + 3] = depthValue / 256;
            iZeroCountBefore++;
        }
        else
        {
            i_before.data[i * 4] = depthValue / 4500.0f * 256;
            i_before.data[i * 4 + 1] = depthValue / 4500.0f * 256;
            i_before.data[i * 4 + 2] = depthValue / 4500.0f * 256;
            i_before.data[i * 4 + 3] = depthValue / 4500.0f * 256;
        }
        maxDepth = depthValue > maxDepth ? depthValue : maxDepth;
    }
    //cout << "max depth value: " << maxDepth << endl;
    unsigned short* smoothDepthArray = (unsigned short*)i_result.data;
    int widthBound = 512 - 1;
    int heightBound = 424 - 1;
    int innerBandThreshold = 1;
    int outerBandThreshold = 3;
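    // Mode filter: for every zero pixel, collect the distinct non-zero depth values in
    // the surrounding 5x5 window together with their frequencies (filterCollection),
    // counting inner-band (3x3) and outer-band hits separately.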
    for (int depthArrayRowIndex = 0; depthArrayRowIndex < 424; depthArrayRowIndex++)
    {
        for (int depthArrayColumnIndex = 0; depthArrayColumnIndex < 512; depthArrayColumnIndex++)
        {
            int depthIndex = depthArrayColumnIndex + (depthArrayRowIndex * 512);
            if (depthArray[depthIndex] == 0)
            {
                int x = depthIndex % 512;
                int y = (depthIndex - x) / 512;
                unsigned short filterCollection[24][2] = { 0 };
                int innerBandCount = 0;
                int outerBandCount = 0;
                for (int yi = -2; yi < 3; yi++)
                {
                    for (int xi = -2; xi < 3; xi++)
                    {
                        if (xi != 0 || yi != 0)
                        {
                            int xSearch = x + xi;
                            int ySearch = y + yi;
                            if (xSearch >= 0 && xSearch <= widthBound &&
                                ySearch >= 0 && ySearch <= heightBound)
                            {
                                int index = xSearch + (ySearch * 512);
                                if (depthArray[index] != 0)
                                {
                                    for (int i = 0; i < 24; i++)
                                    {
                                        if (filterCollection[i][0] == depthArray[index])
                                        {
                                            filterCollection[i][1]++;
                                            break;
                                        }
                                        else if (filterCollection[i][0] == 0)
                                        {
                                            filterCollection[i][0] = depthArray[index];
                                            filterCollection[i][1]++;
                                            break;
                                        }
                                    }
                                    if (yi != 2 && yi != -2 && xi != 2 && xi != -2)
                                        innerBandCount++;
                                    else
                                        outerBandCount++;
                                }
                            }
                        }
                    }
                }
                if (innerBandCount >= innerBandThreshold || outerBandCount >= outerBandThreshold)
                {
                    short frequency = 0;
                    short depth = 0;
                    for (int i = 0; i < 24; i++)
                    {
                        if (filterCollection[i][0] == 0)
                            break;
                        if (filterCollection[i][1] > frequency)
                        {
                            depth = filterCollection[i][0];
                            frequency = filterCollection[i][1];
                        }
                    }
                    smoothDepthArray[depthIndex] = depth;
                }
                else
                {
                    smoothDepthArray[depthIndex] = 0;
                }
            }
            else
            {
                smoothDepthArray[depthIndex] = depthArray[depthIndex];
            }
        }
    }
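    // Rebuild the RGBA visualization from the smoothed depth and count the holes that
    // remain after filtering (debug only; neither image is displayed or returned).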
    for (int i = 0; i < 512 * 424; i++)
    {
        int row = i / 512;
        int col = i % 512;
        unsigned short depthValue = smoothDepthArray[row * 512 + col];
        if (depthValue == 0)
        {
            i_after.data[i * 4] = 255;
            i_after.data[i * 4 + 1] = 0;
            i_after.data[i * 4 + 2] = 0;
            i_after.data[i * 4 + 3] = depthValue / 256;
            iZeroCountAfter++;
        }
        else
        {
            i_after.data[i * 4] = depthValue / 4500.0f * 256;
            i_after.data[i * 4 + 1] = depthValue / 4500.0f * 256;
            i_after.data[i * 4 + 2] = depthValue / 4500.0f * 256;
            i_after.data[i * 4 + 3] = depthValue / 4500.0f * 256;
        }
    }
    i_result.convertTo(i_result8U, CV_8U, 255.0f / 4500.0f); // uDepthMax
    // Note: the 8-bit version is only computed here; the 16-bit filtered depth is returned.
    return i_result;
}