Installation

  • KinectSDK-v1.8-Setup.exe
  • OpenNI-Windows-x86-2.1.0.msi

Qt project

Copy the contents of the Redist folder into the directory that contains the compiled exe.
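The recorder itself is a plain console program, so the Qt project mainly needs to point the compiler and linker at OpenNI2 and OpenCV. The following is only a rough qmake .pro sketch for orientation; the install paths and the OpenCV library suffix (249) are placeholders that depend on your own installation and are not part of the original project.

# Minimal .pro sketch -- paths and library names below are placeholders, adjust to your setup
TEMPLATE = app
CONFIG  += console
CONFIG  -= qt

INCLUDEPATH += "C:/Program Files/OpenNI2/Include"
INCLUDEPATH += "C:/opencv/build/include"

LIBS += -L"C:/Program Files/OpenNI2/Lib" -lOpenNI2
LIBS += -L"C:/opencv/build/x86/vc10/lib" \
        -lopencv_core249 -lopencv_highgui249 -lopencv_imgproc249

SOURCES += main.cpp

After building, copy the Redist contents next to the exe as described above. The full recording and denoising program is listed below.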





#include <stdlib.h>
#include <iostream>
#include <conio.h>
#include <string>
#include <ctime>   // clock()/CLOCKS_PER_SEC used in RemoveNoise()

#include "OpenNI.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"

#define RESOLUTION 640,480
#define RECORDRESOLUTION 590,440
#define ADRESOLUTION 45,40,590,440
#define FPS 20
#define GRAYTH 10
#define REPAIRERANGE 5
#define COLORTH 10

using namespace std;
using namespace cv;
using namespace openni;

// OpenNI status
Status result = STATUS_OK;
// device
Device device;
// OpenNI2 images
VideoFrameRef oniDepthImg;
VideoFrameRef oniColorImg;
// streams
VideoStream oniDepthStream;
VideoStream oniColorStream;
// video modes
VideoMode modeDepth;
VideoMode modeColor;
// OpenCV images
cv::Mat cvDepthImg;
cv::Mat cvDepthImg2;
cv::Mat cvColorImg;
cv::Mat cvColorImg2;
// OpenCV adjusted (cropped) images
cv::Mat cvAdjustDepthImg;
cv::Mat cvAdjustColorImg;
// resolutions
Size se = Size(RESOLUTION);
Size recordse = Size(RECORDRESOLUTION);

void CheckOpenNIError(Status result, string status)
{
    if (result != STATUS_OK)
        cerr << status << " Error: " << OpenNI::getExtendedError() << endl;
}

void iniOpenNI(void);
void releaseResource(void);
void RemoveNoiseRecord(void);
void RemoveNoise(void);
bool pixelRepaire(int, int, int);
bool rangeRepaire(int, int, int);

int main(int argc, char** argv)
{
    iniOpenNI();
    RemoveNoiseRecord();
    releaseResource();
    return 0;
}

void iniOpenNI()
{
    // initialize OpenNI2
    result = OpenNI::initialize();
    CheckOpenNIError(result, "initialize context");

    // open video device
    result = device.open(openni::ANY_DEVICE);
    CheckOpenNIError(result, "open device");

    // create depth stream
    result = oniDepthStream.create(device, openni::SENSOR_DEPTH);
    CheckOpenNIError(result, "create depth stream");

    // set depth mode
    modeDepth.setResolution(RESOLUTION);
    modeDepth.setFps(FPS);
    modeDepth.setPixelFormat(PIXEL_FORMAT_DEPTH_1_MM);
    oniDepthStream.setVideoMode(modeDepth);

    // start depth stream
    result = oniDepthStream.start();
    CheckOpenNIError(result, "start depth stream");

    // create color stream
    result = oniColorStream.create(device, openni::SENSOR_COLOR);
    CheckOpenNIError(result, "create color stream");

    // set color video mode
    modeColor.setResolution(RESOLUTION);
    modeColor.setFps(FPS);
    modeColor.setPixelFormat(PIXEL_FORMAT_RGB888);
    oniColorStream.setVideoMode(modeColor);

    // start color stream
    result = oniColorStream.start();
    CheckOpenNIError(result, "start color stream");

    // set depth-to-color image registration mode
    if (device.isImageRegistrationModeSupported(IMAGE_REGISTRATION_DEPTH_TO_COLOR))
    {
        cout << "support" << endl;
        device.setImageRegistrationMode(IMAGE_REGISTRATION_DEPTH_TO_COLOR);
    }
}

void releaseResource()
{
    // OpenNI2 destroy
    oniDepthStream.destroy();
    oniColorStream.destroy();
    device.close();
    OpenNI::shutdown();
}

void RemoveNoiseRecord()
{
    std::cout << "\t\t\t------RemoveNoiseRecord-------" << std::endl;

    const char *DepthFilename = "oringindepthvideo.avi";
    const char *ColorFilename = "oringincolorvideo.avi";
    const char *removeDepthFilename = "removedepthvideo.avi";
    const char *removeColorFilename = "removecolorvideo.avi";

    VideoWriter removecolorVideoWriter = VideoWriter(removeColorFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    VideoWriter removedepthVideoWriter = VideoWriter(removeDepthFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    VideoWriter colorVideoWriter = VideoWriter(ColorFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    VideoWriter depthVideoWriter = VideoWriter(DepthFilename, CV_FOURCC('X','V','I','D'), FPS, recordse);
    // VideoWriter colorVideoWriter = VideoWriter(ColorFilename, CV_FOURCC('X','V','I','D'), FPS, se);
    // VideoWriter depthVideoWriter = VideoWriter(DepthFilename, CV_FOURCC('X','V','I','D'), FPS, se);

    namedWindow("after-gray", 1);
    namedWindow("after-depth", 1);
    namedWindow("befor-color", 1);
    namedWindow("befor-depth", 1);

    while (true)
    {
        if (oniColorStream.readFrame(&oniColorImg) == STATUS_OK)
        {
            // convert data into OpenCV type
            cv::Mat cvRGBImg(oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData());
            cv::cvtColor(cvRGBImg, cvColorImg2, CV_RGB2BGR);
            cvColorImg2 = Mat(cvColorImg2, Rect(ADRESOLUTION));
            colorVideoWriter.write(cvColorImg2);
            cv::imshow("befor-color", cvColorImg2);
            cvtColor(cvRGBImg, cvColorImg, CV_RGB2GRAY);
            //colorVideoWriter.write(cvColorImg);
        }
        if (oniDepthStream.readFrame(&oniDepthImg) == STATUS_OK)
        {
            cv::Mat cvRawImg16U(oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData());
            cvRawImg16U.convertTo(cvDepthImg, CV_8UC1, 255.0 / (oniDepthStream.getMaxPixelValue()));
            cv::cvtColor(cvDepthImg, cvDepthImg2, CV_GRAY2BGR);
            cvDepthImg2 = Mat(cvDepthImg2, Rect(ADRESOLUTION));
            depthVideoWriter.write(cvDepthImg2);
            cv::imshow("befor-depth", cvDepthImg2);
        }

        cvAdjustDepthImg = Mat(cvDepthImg, Rect(ADRESOLUTION));
        cvAdjustColorImg = Mat(cvColorImg, Rect(ADRESOLUTION));

        RemoveNoise();

        cvtColor(cvAdjustColorImg, cvAdjustColorImg, CV_GRAY2BGR);
        cvtColor(cvAdjustDepthImg, cvAdjustDepthImg, CV_GRAY2BGR);
        removecolorVideoWriter.write(cvAdjustColorImg);
        removedepthVideoWriter.write(cvAdjustDepthImg);
        std::cout << "removecolorVideoWriter" << std::endl;
        std::cout << "\tremovedepthVideoWriter" << std::endl;
        imshow("after-gray", cvAdjustColorImg);
        imshow("after-depth", cvAdjustDepthImg);

        int key = waitKey(10);
        if (key == 27)
        {
            break;
        }
    }

    removecolorVideoWriter.release();
    removedepthVideoWriter.release();
    colorVideoWriter.release();
    depthVideoWriter.release();

    destroyWindow("after-gray");
    destroyWindow("after-depth");
    destroyWindow("befor-color");
    destroyWindow("befor-depth");
}

void RemoveNoise()
{
    clock_t start, finish;
    double totaltime = 0.0;
    start = clock();
    for (int j = (cvAdjustDepthImg.rows - 1); j >= 0; j--)  // depthImage.rows
    {
        const uchar* mj = cvAdjustDepthImg.ptr<uchar>(j);
        for (int i = (cvAdjustDepthImg.cols - 1); i >= 0; i--)  // depthImage.cols
        {
            if (mj[i] <= GRAYTH)
            {
                uchar colorpixel = cvAdjustColorImg.at<uchar>(j, i);
                bool reResult = false;
                if (colorpixel < GRAYTH * 5)
                {
                    for (int k = 1; k < REPAIRERANGE * 3; k++)
                    {
                        reResult = pixelRepaire(i, j, k);
                        if (reResult)
                            break;
                    }
                    // go down
                    if (!reResult)
                    {
                        for (int k = 1; k <= 30; k++)
                        {
                            if ((j + k) < 440)
                            {
                                if (cvAdjustDepthImg.at<uchar>(j + k, i) > GRAYTH)
                                {
                                    cvAdjustDepthImg.at<uchar>(j, i) = cvAdjustDepthImg.at<uchar>(j + k, i);
                                    reResult = true;
                                    break;
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                    // go up
                    if (!reResult)
                    {
                        for (int k = 1; k <= 30; k++)
                        {
                            if ((j - k) >= 0)
                            {
                                if (cvAdjustDepthImg.at<uchar>(j - k, i) > GRAYTH)
                                {
                                    cvAdjustDepthImg.at<uchar>(j, i) = cvAdjustDepthImg.at<uchar>(j - k, i);
                                    reResult = true;
                                    break;
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                }
                else
                {
                    for (int k = 1; k < 30; k++)
                    {
                        if ((i + k) < 590 && !reResult)
                        {
                            if (abs(cvAdjustColorImg.at<uchar>(j, i + k) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(j, i + k) > GRAYTH)
                            {
                                cvAdjustDepthImg.at<uchar>(j, i) = cvAdjustDepthImg.at<uchar>(j, i + k);
                                reResult = true;
                            }
                        }
                        else
                        {
                            break;
                        }
                    }
                }
                if (!reResult)
                {
                    for (int k = 1; k < REPAIRERANGE; k++)
                    {
                        reResult = pixelRepaire(i, j, k);
                        if (reResult)
                            break;
                    }
                }
                if (!reResult)
                {
                    for (int k = 0; k < REPAIRERANGE * 3; k++)
                    {
                        reResult = rangeRepaire(i, j, k);
                        if (reResult)
                            break;
                    }
                }
            }
        }
    }
    finish = clock();
    totaltime = (double)(finish - start) / CLOCKS_PER_SEC;
}

bool pixelRepaire(int i, int j, int repaireRange)
{
    uchar colorpixel = cvAdjustColorImg.at<uchar>(j, i);
    int x = 0;
    int y = 0;
    int n = 0;
    int sum = 0;
    for (y = j - repaireRange; y <= j + repaireRange; y++)
    {
        if (y >= 0 && y < 440)
        {
            if (y == (j - repaireRange) || y == (j + repaireRange))
            {
                for (x = i - repaireRange; x <= i + repaireRange; x++)
                {
                    if (x >= 0 && x < 590)
                    {
                        if (abs(cvAdjustColorImg.at<uchar>(y, x) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(y, x) > GRAYTH)
                        {
                            n++;
                            sum = sum + cvAdjustDepthImg.at<uchar>(y, x);
                        }
                    }
                }
            }
            else
            {
                x = i - repaireRange;
                if (x >= 0 && x < 590)
                {
                    if (abs(cvAdjustColorImg.at<uchar>(y, x) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(y, x) > GRAYTH)
                    {
                        n++;
                        sum = sum + cvAdjustDepthImg.at<uchar>(y, x);
                    }
                }
                x = i + repaireRange;
                if (x >= 0 && x < 590)
                {
                    if (abs(cvAdjustColorImg.at<uchar>(y, x) - colorpixel) <= COLORTH && cvAdjustDepthImg.at<uchar>(y, x) > GRAYTH)
                    {
                        n++;
                        sum = sum + cvAdjustDepthImg.at<uchar>(y, x);
                    }
                }
            }
        }
    }
    if (n < repaireRange * 2)
    {
        return false;
    }
    else
    {
        cvAdjustDepthImg.at<uchar>(j, i) = (uchar)(sum / n);
        return true;
    }
}

bool rangeRepaire(int i, int j, int repaireRange)
{
    uchar colorpixel = cvAdjustColorImg.at<uchar>(j, i);
    int x = 0;
    int y = 0;
    int n = 0;
    int sum = 0;
    for (y = j - repaireRange; y <= j + repaireRange; y++)
    {
        if (y >= 0 && y < 440)
        {
            for (x = i - repaireRange; x <= i + repaireRange; x++)
            {
                if (x >= 0 && x < 590)
                {
                    if (cvAdjustDepthImg.at<uchar>(y, x) > GRAYTH)
                    {
                        n++;
                        sum = sum + cvAdjustDepthImg.at<uchar>(y, x);
                    }
                }
            }
        }
    }
    if (n <= repaireRange * 2)
    {
        return false;
    }
    else
    {
        cvAdjustDepthImg.at<uchar>(j, i) = (uchar)(sum / n);
        return true;
    }
}

Source code

http://git.oschina.net/yuliyang/kinectRecorder
