//#include "duiqi.hpp"

#include "kinect.h"

#include <iostream>

#include "opencv2/opencv.hpp"

#include <opencv2/core/core.hpp>

#include <opencv2/highgui/highgui.hpp>

#include <fstream>

using namespace cv;

using namespace std;

Mat depthFilter(UINT16 *depthData);

// Releases a COM interface pointer and nulls it out, so the caller can
// never double-Release or use a dangling pointer. Safe to call with a
// null pointer (no-op).
template<class Interface>
inline void SafeRelease(Interface *& pInterfaceToRelease)
{
    if (pInterfaceToRelease != nullptr)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = nullptr;
    }
}

UINT16 uDepthMin = 0, uDepthMax = 0;

int main()

{

IKinectSensor* m_pKinectSensor;

HRESULT hr;

ICoordinateMapper*      m_pCoordinateMapper = NULL;

CameraIntrinsics* m_pCameraIntrinsics = new CameraIntrinsics();

hr = GetDefaultKinectSensor(&m_pKinectSensor);

if (FAILED(hr))

{

return hr;

}

IMultiSourceFrameReader* m_pMultiFrameReader = NULL;

if (m_pKinectSensor)

{

hr = m_pKinectSensor->Open();

if (SUCCEEDED(hr))

{

hr = m_pKinectSensor->OpenMultiSourceFrameReader(

FrameSourceTypes::FrameSourceTypes_Color |

FrameSourceTypes::FrameSourceTypes_Infrared |

FrameSourceTypes::FrameSourceTypes_Depth,

&m_pMultiFrameReader);

}

}

if (SUCCEEDED(hr))

{

hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);

}

if (!m_pKinectSensor || FAILED(hr))

{

return E_FAIL;

}

IMultiSourceFrame* m_pMultiFrame = nullptr;

IDepthFrameReference* m_pDepthFrameReference = NULL;

IColorFrameReference* m_pColorFrameReference = NULL;

IInfraredFrameReference* m_pInfraredFrameReference = NULL;

IInfraredFrame* m_pInfraredFrame = NULL;

IDepthFrame* m_pDepthFrame = NULL;

IColorFrame* m_pColorFrame = NULL;

Mat rgb(1080, 1920, CV_8UC4);

Mat rgb_resize(540, 960, CV_8UC4);

DepthSpacePoint*        m_pDepthCoordinates = NULL;

ColorSpacePoint*        m_pColorCoordinates = NULL;

CameraSpacePoint*       m_pCameraCoordinates = NULL;

m_pColorCoordinates = new ColorSpacePoint[512 * 424];

m_pCameraCoordinates = new CameraSpacePoint[512 * 424];

UINT16 *depthData = new UINT16[424 * 512];

Mat depth(424, 512, CV_16UC1);

Mat depth8U(424, 512, CV_8U);

vector<DepthSpacePoint> depthSpacePoints(1920 * 1080);

Mat CoordinateMapperMat(1080, 1520, CV_8U);

Mat CoordinateMapperMat_resize(540, 960, CV_8U);

int savecount = 0;

while (true)

{

hr = m_pMultiFrameReader->AcquireLatestFrame(&m_pMultiFrame);

if (FAILED(hr) || !m_pMultiFrame)

{

continue;

}

if (SUCCEEDED(hr))

hr = m_pMultiFrame->get_ColorFrameReference(&m_pColorFrameReference);

if (SUCCEEDED(hr))

hr = m_pColorFrameReference->AcquireFrame(&m_pColorFrame);

if (SUCCEEDED(hr))

hr = m_pMultiFrame->get_DepthFrameReference(&m_pDepthFrameReference);

if (SUCCEEDED(hr))

hr = m_pDepthFrameReference->AcquireFrame(&m_pDepthFrame);

/*m_pDepthFrame->get_DepthMinReliableDistance(&uDepthMin);

m_pDepthFrame->get_DepthMaxReliableDistance(&uDepthMax);

cout << "Reliable Distance: " << uDepthMin << " - " << uDepthMax << endl;*/

savecount++;

cout << savecount << endl;

ostringstream savecountstr;

savecountstr << savecount;

UINT nColorBufferSize = 1920 * 1080 * 4;

if (SUCCEEDED(hr))

{

hr = m_pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(rgb.data), ColorImageFormat::ColorImageFormat_Bgra);

Rect rect(200, 0, 1520, 1080);

Mat rgb_roi = rgb(rect);

resize(rgb_roi, rgb_resize, Size(), 0.4, 0.4);

imshow("color_resize", rgb_resize);

imwrite("D:/file/hust/ARcodes/ARKinect∂‘∆Î181107/ARKinect/save6/color/" + savecountstr.str() + ".png", rgb_resize);

}

UINT nDepthBufferSize = 424 * 512;

if (SUCCEEDED(hr))

{

hr = m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depthData));

m_pDepthFrame->CopyFrameDataToArray(nDepthBufferSize, reinterpret_cast<UINT16*>(depth.data));

//16位转成了8位显示

depth.convertTo(depth8U, CV_8U, 255.0f / 4500.0f);

imshow("depth", depth8U);

}

//16位数据

Mat filterDepth = depthFilter(depthData);

Mat filterDepth8U;

filterDepth.convertTo(filterDepth8U, CV_8U, 255.0f / 4500.0f);

if (SUCCEEDED(hr))

{

hr = m_pCoordinateMapper->MapColorFrameToDepthSpace(424 * 512, reinterpret_cast<UINT16*>(filterDepth.data), 1920 * 1080, &depthSpacePoints[0]);

}

if (SUCCEEDED(hr))

{

CoordinateMapperMat = Scalar(0, 0, 0, 0); // ∂®“ÂŒ™Mat(colorHeight, colorWidth, CV_8UC4)

for (int y = 0; y < 1080; y++)

{

for (int x = 200; x < 1720; x++)

//for (int x = 0; x < 1920; x++)

{

unsigned int index = y * 1920 + x;

DepthSpacePoint p = depthSpacePoints[index];

if (p.X != -std::numeric_limits<float>::infinity() && p.Y != -std::numeric_limits<float>::infinity())

{

int depthX = static_cast<int>(p.X + 0.2f);

int depthY = static_cast<int>(p.Y + 0.2f);

if ((depthX >= 0) && (depthX < 512) && (depthY >= 0) && (depthY < 424))

{

CoordinateMapperMat.at<uchar>(y, x - 200) = filterDepth8U.at<uchar>(depthY, depthX);

}

}

}

}

resize(CoordinateMapperMat, CoordinateMapperMat_resize, Size(), 0.4, 0.4);

imshow("CoordinateMapper", CoordinateMapperMat_resize);

imwrite("D:/file/hust/ARcodes/ARKinect∂‘∆Î181107/ARKinect/save6/result/" + savecountstr.str() + ".png", CoordinateMapperMat_resize);

}

int c = waitKey(1);

if ((char)c == VK_ESCAPE)

break;

SafeRelease(m_pColorFrame);

SafeRelease(m_pDepthFrame);

SafeRelease(m_pColorFrameReference);

SafeRelease(m_pDepthFrameReference);

SafeRelease(m_pMultiFrame);

}

cv::destroyAllWindows();

SafeRelease(m_pCoordinateMapper);

m_pKinectSensor->Close();

std::system("pause");

return 0;

}

// Hole-filling filter for a 512x424 Kinect depth frame.
//
// For each pixel whose depth is 0 (invalid), the 5x5 neighborhood minus
// the center (24 pixels) is examined: the 8 pixels of the inner 3x3 ring
// and the 16 pixels of the outer ring. If at least innerBandThreshold
// inner or outerBandThreshold outer neighbors carry a valid depth, the
// hole is filled with the modal (most frequent) neighbor depth;
// otherwise it stays 0. Valid pixels are copied through unchanged.
//
// @param depthData  raw 16-bit depth values, row-major 512x424 (not modified)
// @return           filtered depth image, CV_16UC1, 424x512
//
// Note: the previous revision also built two CV_8UC4 visualization mats
// and an 8-bit conversion of the result per call and then discarded
// them; that dead work has been removed. Returned values are identical.
Mat depthFilter(UINT16 *depthData) {
    const int width = 512;
    const int height = 424;
    const int innerBandThreshold = 1;   // min valid pixels in the 3x3 ring
    const int outerBandThreshold = 3;   // min valid pixels in the outer ring

    Mat i_result(height, width, CV_16UC1);
    unsigned short* depthArray = (unsigned short*)depthData;
    unsigned short* smoothDepthArray = (unsigned short*)i_result.data;

    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            int depthIndex = x + y * width;
            if (depthArray[depthIndex] != 0)
            {
                // Valid pixel: pass through unchanged.
                smoothDepthArray[depthIndex] = depthArray[depthIndex];
                continue;
            }

            // Histogram of distinct neighbor depth values:
            // [i][0] = depth value, [i][1] = occurrence count.
            unsigned short filterCollection[24][2] = { 0 };
            int innerBandCount = 0;
            int outerBandCount = 0;

            for (int yi = -2; yi < 3; yi++)
            {
                for (int xi = -2; xi < 3; xi++)
                {
                    if (xi == 0 && yi == 0)
                        continue;                       // skip the hole itself
                    int xSearch = x + xi;
                    int ySearch = y + yi;
                    if (xSearch < 0 || xSearch >= width ||
                        ySearch < 0 || ySearch >= height)
                        continue;                       // outside the image
                    unsigned short neighbor = depthArray[xSearch + ySearch * width];
                    if (neighbor == 0)
                        continue;                       // invalid neighbor

                    // Tally this depth value (first free slot or matching slot).
                    for (int i = 0; i < 24; i++)
                    {
                        if (filterCollection[i][0] == neighbor)
                        {
                            filterCollection[i][1]++;
                            break;
                        }
                        else if (filterCollection[i][0] == 0)
                        {
                            filterCollection[i][0] = neighbor;
                            filterCollection[i][1]++;
                            break;
                        }
                    }
                    if (yi != 2 && yi != -2 && xi != 2 && xi != -2)
                        innerBandCount++;
                    else
                        outerBandCount++;
                }
            }

            if (innerBandCount >= innerBandThreshold || outerBandCount >= outerBandThreshold)
            {
                // Enough evidence: fill with the most frequent neighbor depth.
                unsigned short bestFrequency = 0;
                unsigned short bestDepth = 0;
                for (int i = 0; i < 24; i++)
                {
                    if (filterCollection[i][0] == 0)
                        break;                          // end of used slots
                    if (filterCollection[i][1] > bestFrequency)
                    {
                        bestDepth = filterCollection[i][0];
                        bestFrequency = filterCollection[i][1];
                    }
                }
                smoothDepthArray[depthIndex] = bestDepth;
            }
            else
            {
                smoothDepthArray[depthIndex] = 0;       // leave the hole
            }
        }
    }
    return i_result;
}

/* Scraped blog-site boilerplate below (related-article listing) — not C++;
   commented out so the translation unit can compile. Original text preserved.

kinect 深度图与彩色图对齐程序的更多相关文章

  1. OpenNI1.5获取华硕XtionProLive深度图和彩色图并用OpenCV显示

    华硕XtionPro类似Kinect,都是体感摄像机,可捕捉深度图和彩色图. 具体參数见:http://www.asus.com.cn/Multimedia/Xtion_PRO_LIVE/specif ...

  2. OpenNI2获取华硕XtionProLive深度图和彩色图并用OpenCV显示

    使用OpenNI2打开XtionProLive时有个问题,彩色图分辨率不管怎样设置始终是320*240,深度图倒是能够设成640*480,而OpenNI1.x是能够获取640*480的彩色图的. 彩色 ...

  3. Android-将RGB彩色图转换为灰度图

    package com.example.yanlei.wifi; import android.graphics.Bitmap; import android.graphics.BitmapFacto ...

  4. 基于.Net core3.0 开发的斗图小程序后端+斗图小程序

    为啥要写这么一个小程序? 作为互联网的原住民. 90后程序员的我,从高中开始发QQ小表情. 到之后的熊猫头,蘑菇头. 可以说表情包陪伴我从学校到社会,从青少年到中年.. 而且因为斗图厉害,还找到一个女 ...

  5. QT 实现彩色图亮度均衡,RGB和HSI空间互相转换

    从昨天折腾到今天.再折腾下去我都要上主楼了  大致和灰度图均衡是一样的,主要是不能像平滑什么的直接对R,G,B三个分量进行.这样出来的图像时没法看的.因此我们要对亮度进行均衡.而HSI彩色空间中的分量 ...

  6. perf + Flame Graph火焰图分析程序性能

    1.perf命令简要介绍 性能调优时,我们通常需要分析查找到程序百分比高的热点代码片段,这便需要使用 perf record 记录单个函数级别的统计信息,并使用 perf report 来显示统计结果 ...

  7. [转]perf + 火焰图分析程序性能

    1.perf命令简要介绍 性能调优时,我们通常需要分析查找到程序百分比高的热点代码片段,这便需要使用 perf record 记录单个函数级别的统计信息,并使用 perf report 来显示统计结果 ...

  8. perf + 火焰图分析程序性能

    1.perf命令简要介绍 性能调优时,我们通常需要分析查找到程序百分比高的热点代码片段,这便需要使用 perf record 记录单个函数级别的统计信息,并使用 perf report 来显示统计结果 ...

  9. 用Html5制作的一款数学教学程序Function Graphics(绘制函数图的程序)

    最近我不仅对游戏开发感兴趣,还对函数图感兴趣,特此我开发了这个程序.以下是一些介绍和下载演示地址,喜欢的朋友可以看看: 一,产品名片 产品名:Function Graphics 版本: 0.1 开发者 ...

随机推荐

  1. express中遇到的一个小问题“403”

    这样子的一个express简单项目文件执行会出现403 Forbidden的错误: var express = require('express'); var app = express(); app ...

  2. 通用唯一识别码UUID

    UUID 概念:UUID 是 通用唯一识别码(Universally Unique Identifier)的缩写,目前最广泛应用的UUID,是微软公司的全局唯一标识符(GUID),而其他重要的应用,则 ...

  3. 时钟信号的占空比调整——Verilog

    时钟信号的占空比调整——Verilog `timescale 1ns / 1ps /////////////////////////////////////////////////////////// ...

  4. Eclipse导入hadoop源码

    在windows中,使用Eclipse阅读hadoop源码,首先到apache官网下载tar.gz的hadoop源码压缩文件,解压. 方法1:(hadoop技术内幕推荐) 打开Eclipse,新建ja ...

  5. 基于Servlet的Echarts例子(2018-12-26更新)

    引子 ECharts是百度出品的,一个使用 JavaScript 实现的开源可视化库.程序员在Web页面上引入并稍作配置就能做出漂亮的数据图表. 本篇文章简单介绍一下如何在JSP中使用Echarts, ...

  6. 关于Oracle字符集在dmp文件导入导出中的乱码影响

    Oracle 在进行dmp备份和还原的时候,服务器端字符集和客户端字符集会对这个过程有较大影响,特别是数据表中存储了中文.存储过程中使用了中文编码(注释)的时候,如果没有处理好字符集的问题,在进行还原 ...

  7. canvas一些属性

    lineTo(x,y) 定义线条结束坐标 moveTo(x,y) 定义线条开始坐标 ctx.stroke();绘制空心图形 ctx.fill();填充图形 把当前路径环绕起来的区域进行填充 ctx.f ...

  8. [UE4]Add Offset

    在原来值的基础上增加偏移

  9. C#后台接java接口传输字节数组(byte[])

    事情是这样的C#t代码之前接的WCF接口,后来那边统一改为java的接口,我是用的HttpClient从后台发请求调用的java接口,其他接口都很顺利,是的....知道遇到一个需要传byte[]类型数 ...

  10. 阿里云服务器 centos7 中继邮箱+转发服务 详细配置

    阿里云centos7 邮箱转发服务配置 一.文档编写目的: 网络拓扑图: 阿里云服务器屏蔽掉25端口后,内网服务器如何通过跳板机发送邮件到外网邮箱. 如果是可联网的阿里云机器,可以直接配置mailx使 ...
*/