安装

  • KinectSDK-v1.8-Setup.exe
  • OpenNI-Windows-x86-2.1.0.msi

Qt工程

拷贝 OpenNI 安装目录下 Redist 文件夹中的内容到编译生成的 exe 所在目录





// Kinect depth/color video recorder with depth-hole repair (OpenNI2 + OpenCV).
#include <stdlib.h>
#include <iostream>
#include <conio.h>
#include <string>
#include <ctime>   // clock()/CLOCKS_PER_SEC used by RemoveNoise()
#include "OpenNI.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"

// Sensor capture resolution (width,height).
#define RESOLUTION 640,480
// Resolution of the recorded (cropped) videos (width,height).
#define RECORDRESOLUTION 590,440
// Crop rectangle (x,y,width,height) applied to every captured frame.
#define ADRESOLUTION 45,40,590,440
// Frames per second for capture and recording.
#define FPS 20
// Depth pixels at or below this 8-bit value are treated as holes (noise).
#define GRAYTH 10
// Base neighborhood radius used by the repair passes.
#define REPAIRERANGE 5
// Max gray-level difference for two color pixels to count as "similar".
#define COLORTH 10

using namespace std;
using namespace cv;
using namespace openni;

// Status of the most recent OpenNI call.
Status result = STATUS_OK;
// Open device.
Device device;
// OpenNI2 frame buffers.
VideoFrameRef oniDepthImg;
VideoFrameRef oniColorImg;
// OpenNI2 streams.
VideoStream oniDepthStream;
VideoStream oniColorStream;
// Video mode configuration for each stream.
VideoMode modeDepth;
VideoMode modeColor;
// OpenCV images: full-size 8-bit depth/gray and 3-channel variants.
cv::Mat cvDepthImg;
cv::Mat cvDepthImg2;
cv::Mat cvColorImg;
cv::Mat cvColorImg2;
// Cropped (ADRESOLUTION) working images the repair passes operate on.
cv::Mat cvAdjustDepthImg;
cv::Mat cvAdjustColorImg;
// Capture and recording sizes.
Size se = Size(RESOLUTION);
Size recordse = Size(RECORDRESOLUTION);

// Print an extended OpenNI error message when `result` signals a failure.
// `status` names the operation that was attempted.
void CheckOpenNIError( Status result, const string& status )
{
    if( result != STATUS_OK )
        cerr << status << " Error: " << OpenNI::getExtendedError() << endl;
}
// Forward declarations.
void iniOpenNI(void);
void releaseResource(void);
void RemoveNoiseRecord(void);
void RemoveNoise(void);
bool pixelRepaire(int, int, int);
bool rangeRepaire(int, int, int);

// Entry point: initialize the sensor, record until ESC, then clean up.
int main( int argc, char** argv )
{
    iniOpenNI();
    RemoveNoiseRecord();
    releaseResource();
    return 0;
}

// Initialize OpenNI2, open the first device, and start depth and color
// streams at RESOLUTION/FPS. Errors are reported but not fatal here.
void iniOpenNI()
{
    // initialize OpenNI2
    result = OpenNI::initialize();
    CheckOpenNIError( result, "initialize context" );
    // open video device
    result = device.open( openni::ANY_DEVICE );
    CheckOpenNIError( result, "open device" );
    // create depth stream
    result = oniDepthStream.create( device, openni::SENSOR_DEPTH );
    CheckOpenNIError( result, "create depth stream" );
    // configure depth mode: 1mm units, 8-bit conversion happens later
    modeDepth.setResolution( RESOLUTION );
    modeDepth.setFps( FPS );
    modeDepth.setPixelFormat( PIXEL_FORMAT_DEPTH_1_MM );
    oniDepthStream.setVideoMode( modeDepth );
    // start depth stream
    result = oniDepthStream.start();
    CheckOpenNIError( result, "start depth stream" );
    // create color stream
    result = oniColorStream.create( device, openni::SENSOR_COLOR );
    CheckOpenNIError( result, "create color stream" );
    // configure color mode
    modeColor.setResolution( RESOLUTION );
    modeColor.setFps( FPS );
    modeColor.setPixelFormat( PIXEL_FORMAT_RGB888 );
    oniColorStream.setVideoMode( modeColor );
    // start color stream
    result = oniColorStream.start();
    CheckOpenNIError( result, "start color stream" );
    // align the depth image to the color image when the device supports it
    if( device.isImageRegistrationModeSupported( IMAGE_REGISTRATION_DEPTH_TO_COLOR ) )
    {
        cout << "support" << endl;
        device.setImageRegistrationMode( IMAGE_REGISTRATION_DEPTH_TO_COLOR );
    }
}

// Stop and destroy the streams, close the device, shut down OpenNI2.
void releaseResource()
{
    // OpenNI2 destroy
    oniDepthStream.destroy();
    oniColorStream.destroy();
    device.close();
    OpenNI::shutdown();
}

// Capture loop: writes raw ("oringin*") and noise-removed ("remove*")
// depth/color AVI files, showing before/after windows, until ESC is pressed.
void RemoveNoiseRecord()
{
    std::cout << "\t\t\t------RemoveNoiseRecord-------" << std::endl;
    // Output files; string literals must bind to const char* (C++11+).
    // NOTE(review): "oringin" spelling kept to preserve existing output names.
    const char *DepthFilename = "oringindepthvideo.avi";
    const char *ColorFilename = "oringincolorvideo.avi";
    const char *removeDepthFilename = "removedepthvideo.avi";
    const char *removeColorFilename = "removecolorvideo.avi";
    // All writers record the cropped RECORDRESOLUTION frames with XVID.
    VideoWriter removecolorVideoWriter = VideoWriter(removeColorFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    VideoWriter removedepthVideoWriter = VideoWriter(removeDepthFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    VideoWriter colorVideoWriter = VideoWriter(ColorFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    VideoWriter depthVideoWriter = VideoWriter(DepthFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    namedWindow("after-gray", 1);
    namedWindow("after-depth", 1);
    namedWindow("befor-color", 1);
    namedWindow("befor-depth", 1);
    while(true)
    {
        if( oniColorStream.readFrame( &oniColorImg ) == STATUS_OK )
        {
            // Wrap the OpenNI buffer in a Mat header (no copy), convert RGB->BGR.
            cv::Mat cvRGBImg( oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData() );
            cv::cvtColor( cvRGBImg, cvColorImg2, CV_RGB2BGR );
            cvColorImg2 = Mat(cvColorImg2, Rect(ADRESOLUTION));
            colorVideoWriter.write(cvColorImg2);
            cv::imshow("befor-color", cvColorImg2 );
            // Grayscale copy used by the repair heuristics.
            cvtColor(cvRGBImg, cvColorImg, CV_RGB2GRAY);
        }
        if( oniDepthStream.readFrame( &oniDepthImg ) == STATUS_OK )
        {
            cv::Mat cvRawImg16U( oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData() );
            // Scale 16-bit depth down to 8-bit for display and recording.
            cvRawImg16U.convertTo( cvDepthImg, CV_8UC1, 255.0/(oniDepthStream.getMaxPixelValue()));
            cv::cvtColor(cvDepthImg, cvDepthImg2, CV_GRAY2BGR);
            cvDepthImg2 = Mat(cvDepthImg2, Rect(ADRESOLUTION));
            depthVideoWriter.write(cvDepthImg2);
            cv::imshow( "befor-depth", cvDepthImg2 );
        }
        // Guard: if either stream has produced no frame yet (e.g. first
        // iteration read failure), cropping an empty Mat would crash.
        if( cvDepthImg.empty() || cvColorImg.empty() )
        {
            if( waitKey(10) == 27 )
                break;
            continue;
        }
        // Crop working views (share data with the full-size images).
        cvAdjustDepthImg = Mat(cvDepthImg, Rect(ADRESOLUTION));
        cvAdjustColorImg = Mat(cvColorImg, Rect(ADRESOLUTION));
        RemoveNoise();
        // Back to 3 channels so the writers accept the frames.
        cvtColor(cvAdjustColorImg, cvAdjustColorImg, CV_GRAY2BGR);
        cvtColor(cvAdjustDepthImg, cvAdjustDepthImg, CV_GRAY2BGR);
        removecolorVideoWriter.write(cvAdjustColorImg);
        removedepthVideoWriter.write(cvAdjustDepthImg);
        std::cout << "removecolorVideoWriter" << std::endl;
        std::cout << "\tremovedepthVideoWriter" << std::endl;
        imshow("after-gray", cvAdjustColorImg);
        imshow("after-depth", cvAdjustDepthImg);
        int key = waitKey(10);
        if(key == 27)  // ESC quits
        {
            break;
        }
    }
    removecolorVideoWriter.release();
    removedepthVideoWriter.release();
    colorVideoWriter.release();
    depthVideoWriter.release();
    destroyWindow("after-gray");
    destroyWindow("after-depth");
    destroyWindow("befor-color");
    destroyWindow("befor-depth");
}

// Fill depth holes (values <= GRAYTH) in cvAdjustDepthImg, guided by the
// grayscale image cvAdjustColorImg.
void RemoveNoise()
{
    // Timing instrumentation.
    clock_t start, finish;
    double totaltime = 0.0;
    start = clock();
    // Scan bottom-right to top-left; every depth pixel <= GRAYTH is a hole.
    for(int j = (cvAdjustDepthImg.rows-1); j >= 0; j--)
    {
        const uchar* mj = cvAdjustDepthImg.ptr<uchar>(j);
        for(int i = (cvAdjustDepthImg.cols-1); i >= 0; i--)
        {
            if(mj[i] <= GRAYTH)
            {
                uchar colorpixel = cvAdjustColorImg.at<uchar>(j,i);
                bool reResult = false;
                if(colorpixel < GRAYTH*5)
                {
                    // Dark color pixel: try color-matched rings of growing radius...
                    for(int k = 1; k < REPAIRERANGE*3; k++)
                    {
                        reResult = pixelRepaire(i, j, k);
                        if(reResult)
                            break;
                    }
                    // ...then copy the nearest valid depth below (go down)...
                    if(!reResult)
                    {
                        for(int k = 1; k <= 30; k++)
                        {
                            if((j+k) < 440)  // 440 == RECORDRESOLUTION height
                            {
                                if(cvAdjustDepthImg.at<uchar>(j+k,i) > GRAYTH)
                                {
                                    cvAdjustDepthImg.at<uchar>(j,i) = cvAdjustDepthImg.at<uchar>(j+k,i);
                                    reResult = true;
                                    break;
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                    // ...or above (go up).
                    if(!reResult)
                    {
                        for(int k = 1; k <= 30; k++)
                        {
                            if((j-k) >= 0)
                            {
                                if(cvAdjustDepthImg.at<uchar>(j-k,i) > GRAYTH)
                                {
                                    cvAdjustDepthImg.at<uchar>(j,i) = cvAdjustDepthImg.at<uchar>(j-k,i);
                                    reResult = true;
                                    break;
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                }
                else
                {
                    // Bright pixel: scan rightwards for a similar-colored pixel
                    // that has valid depth and copy its value.
                    for(int k = 1; k < 30; k++)
                    {
                        if((i+k) < 590 && !reResult)  // 590 == RECORDRESOLUTION width
                        {
                            if(abs(cvAdjustColorImg.at<uchar>(j,i+k) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(j,i+k) > GRAYTH)
                            {
                                cvAdjustDepthImg.at<uchar>(j,i) = cvAdjustDepthImg.at<uchar>(j,i+k);
                                reResult = true;
                            }
                        }
                        else
                        {
                            break;
                        }
                    }
                }
                // Fallback 1: color-guided ring average at small radii.
                if(!reResult)
                {
                    for(int k = 1; k < REPAIRERANGE; k++)
                    {
                        reResult = pixelRepaire(i, j, k);
                        if(reResult)
                            break;
                    }
                }
                // Fallback 2: unconditional square average at growing radii.
                if(!reResult)
                {
                    for(int k = 0; k < REPAIRERANGE*3; k++)
                    {
                        reResult = rangeRepaire(i, j, k);
                        if(reResult)
                            break;
                    }
                }
            }
        }
    }
    finish = clock();
    totaltime = (double)(finish-start)/CLOCKS_PER_SEC;
    (void)totaltime;  // NOTE(review): elapsed time measured but never reported
}

// Repair hole (i,j) from the square ring at distance repaireRange, averaging
// only pixels whose gray value is within COLORTH of the hole's color and
// whose depth is valid (> GRAYTH). Returns true when the hole was filled.
bool pixelRepaire(int i, int j, int repaireRange)
{
    uchar colorpixel = cvAdjustColorImg.at<uchar>(j,i);
    int x = 0;
    int y = 0;
    int n = 0;    // number of contributing (color-similar, valid-depth) pixels
    int sum = 0;  // sum of their depth values
    // Walk the square ring of radius repaireRange centered on (i,j).
    for(y = j-repaireRange; y <= j+repaireRange; y++)
    {
        if(y >= 0 && y < 440)  // 440 == RECORDRESOLUTION height
        {
            if(y == (j-repaireRange) || y == (j+repaireRange))
            {
                // Top/bottom edge of the ring: visit the whole row.
                for(x = i-repaireRange; x <= i+repaireRange; x++)
                {
                    if(x >= 0 && x < 590)  // 590 == RECORDRESOLUTION width
                    {
                        if(abs(cvAdjustColorImg.at<uchar>(y,x) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(y,x) > GRAYTH)
                        {
                            n++;
                            sum = sum + cvAdjustDepthImg.at<uchar>(y,x);
                        }
                    }
                }
            }
            else
            {
                // Middle rows: only the left and right ring columns.
                x = i-repaireRange;
                if(x >= 0 && x < 590)
                {
                    if(abs(cvAdjustColorImg.at<uchar>(y,x) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(y,x) > GRAYTH)
                    {
                        n++;
                        sum = sum + cvAdjustDepthImg.at<uchar>(y,x);
                    }
                }
                x = i+repaireRange;
                if(x >= 0 && x < 590)
                {
                    if(abs(cvAdjustColorImg.at<uchar>(y,x) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(y,x) > GRAYTH)
                    {
                        n++;
                        sum = sum + cvAdjustDepthImg.at<uchar>(y,x);
                    }
                }
            }
        }
    }
    // Require enough supporting pixels before trusting the average.
    if(n < repaireRange*2)
    {
        return false;
    }
    else
    {
        cvAdjustDepthImg.at<uchar>(j,i) = (uchar)(sum/n);
        return true;
    }
}

// Repair hole (i,j) from the full (2r+1)x(2r+1) square around it, ignoring
// color similarity: averages every valid depth pixel (> GRAYTH) in range.
// With repaireRange == 0 the square is the hole itself, so the n check fails
// and the function returns false. Returns true when the hole was filled.
bool rangeRepaire(int i, int j, int repaireRange)
{
    uchar colorpixel = cvAdjustColorImg.at<uchar>(j,i);
    (void)colorpixel;  // NOTE(review): read but unused — kept for symmetry with pixelRepaire
    int x = 0;
    int y = 0;
    int n = 0;    // number of valid depth pixels found
    int sum = 0;  // sum of their depth values
    // Visit the full square of radius repaireRange, clipped to the image.
    for(y = j-repaireRange; y <= j+repaireRange; y++)
    {
        if(y >= 0 && y < 440)  // 440 == RECORDRESOLUTION height
        {
            for(x = i-repaireRange; x <= i+repaireRange; x++)
            {
                if(x >= 0 && x < 590)  // 590 == RECORDRESOLUTION width
                {
                    if(cvAdjustDepthImg.at<uchar>(y,x) > GRAYTH)
                    {
                        n++;
                        sum = sum + cvAdjustDepthImg.at<uchar>(y,x);
                    }
                }
            }
        }
    }
    // Not enough support: leave the hole for a larger radius / next pass.
    if(n <= repaireRange*2)
    {
        return false;
    }
    else
    {
        cvAdjustDepthImg.at<uchar>(j,i) = (uchar)(sum/n);
        return true;
    }
}

source code

http://git.oschina.net/yuliyang/kinectRecorder

kinect 录制彩色和深度视频的更多相关文章

  1. Kinect SDK(1):读取彩色、深度、骨骼信息并用OpenCV显示

    Kinect SDK 读取彩色.深度.骨骼信息并用OpenCV显示 一.原理说明 对于原理相信大家都明白大致的情况,因此,在此只说比较特别的部分. 1.1 深度流数据: 深度数据流所提供的图像帧中,每 ...

  2. Kinect v1 (Microsoft Kinect for Windows v1 )彩色和深度图像对的采集步骤

    Kinect v1 (Microsoft Kinect for Windows v1 )彩色和深度图像对的采集步骤 一.在ubuntu下尝试 1. 在虚拟机VWware Workstation 12. ...

  3. Realsense 提取彩色和深度视频流

    一.简要介绍 关于realsense的介绍,网上很多,这里不再赘述,sdk及相关文档可参考realsense SDK,也可参考开发人员专区. 运行代码之前,要确保你已经安装好了realsense的DC ...

  4. 使用Flash Media Server(FMS)录制mp4格式的视频

    最近在做一个有关视频直播和点播的项目,客户的一个需求就是可以控制对直播流的录制,直播的实现采用的是Adobe的Flash Media Server,具体方式就是:视频采集端采集视频并编码->rt ...

  5. OpenNI结合Unity3D Kinect进行体感游戏开发(转)

    OpenNI结合Unity3D Kinect进行体感游戏开发(转) 楼主# 更多 发布于:2012-07-17 16:42     1. 下载安装Unity3D(目前版本为3.4)2. 下载OpenN ...

  6. ROS indigo下Kinect v1的驱动安装与调试

    ROS indigo下Kinect v1的驱动安装与调试 本文简要叙述了在ROS indigo版本下Kinect v1的驱动安装与调试过程. 1. 实验环境 (1)硬件:  台式机和Kinect v1 ...

  7. 高翔《视觉SLAM十四讲》从理论到实践

    目录 第1讲 前言:本书讲什么:如何使用本书: 第2讲 初始SLAM:引子-小萝卜的例子:经典视觉SLAM框架:SLAM问题的数学表述:实践-编程基础: 第3讲 三维空间刚体运动 旋转矩阵:实践-Ei ...

  8. 我的第一个项目:用kinect录视频库

    kinect深度视频去噪 kinectmod32.dll http://pan.baidu.com/s/1DsGqX 下载后改名kinect.dll 替换掉Redist\OpenNI2\Drivers ...

  9. 【计算机视觉】深度相机(五)--Kinect v2.0

    原文:http://blog.csdn.NET/qq1175421841/article/details/50412994 ----微软Build2012大会:Kinect for Windows P ...

随机推荐

  1. photoshop:找不到增效工具入口点

    按Ctrl+k+9 ,到预设的增效工具栏设置为要存储的文件夹路径:

  2. 290. Word Pattern

    题目: Given a pattern and a string str, find if str follows the same pattern. Here follow means a full ...

  3. 在struts-config.xml中配置validator-plugin导致404 Servlet action is not available

    就是在struts-config.xml中添加了这么一段 <plug-in className="org.apache.struts.validator.ValidatorPlugIn ...

  4. B/S和C/S的区别

    B/S 指的是 Browser/Server : C/S 指的是Client/Server 区别: 1. B/S最大优势为客户端免维护,适用于用户群庞大,或客户需求经长发生变化的情况. C/S功能强大 ...

  5. ES6入门之Symbol

    ES5对象属性名都是字符串容易造成属性名的冲突. eg:var a = { name: 'lucy'}; a.name = 'lili';这样就会重写属性 ES6引入了一种新的原始数据类型Symbol ...

  6. 编辑器Emacs下载网址(中国镜像)

      Root gnu emacs windows File Name ↓ File Size ↓ Date ↓ Parent directory/ - - README 14K 2014-Nov-15 ...

  7. AppDomain 应用程序域

    应用程序域 一.什么是应用程序域? 应用程序域 (application domain) (AppDomain) 一种边界,它由公共语言运行库围绕同一应用程序范围内创建的对象建立(即,从应用程序入口点 ...

  8. Entity Framework学习 - 4.Code First升级数据库

    1.在nuget控制台中执行:Enable-Migrations 2.将出现的configuation.cs文件中的AutomaticMigrationsEnabled属性改为true 3.在nuge ...

  9. Java里面instanceof怎么实现的

    开始完全一头雾水呀,后面看了Java指令集的介绍,逐渐理解了. https://www.zhihu.com/question/21574535/answer/18998914 下面这个答案比较直白 你 ...

  10. Excel文件操作方式比较

    C++读取Excel的XLS文件的方法有很多,但是也许就是因为方法太多,大家在选择的时候会很疑惑. 由于前两天要做导表工具,比较了常用的方法,总结一下写个短文, 1.OLE的方式 这个大约是最常用的方 ...