SDL 2.0 example: receive the drone's video stream over the network, decode it with FFmpeg (libavformat/libavcodec), convert it with libswscale, and display it in two SDL 2.0 windows ("YUV" and "BGR").
// gcc -o testDrone2_video testDrone2_video.c -lavcodec -lavformat -lswscale -lSDL2
// g++ -o testDrone2_video testDrone2_video.c -lavcodec -lavformat -lswscale -lSDL2
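// Note: this example is written against the legacy FFmpeg API (av_register_all, avcodec_alloc_frame,
// avpicture_fill, PIX_FMT_*, avcodec_decode_video2). Recent FFmpeg releases have removed these calls;
// porting would mean switching to av_frame_alloc, av_image_fill_arrays, the AV_PIX_FMT_* enums and
// avcodec_send_packet/avcodec_receive_frame.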
#ifdef __cplusplus
extern "C" {
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#ifdef __cplusplus
}
#endif
#include "SDL2/SDL.h"
int main(int argc, char* argv[]) {
// 3.0. Initialize the video subsystem; this must be done before any other SDL call!
if (SDL_Init(SDL_INIT_VIDEO) < 0) {
fprintf(stderr, "Unable to init SDL: %s\n", SDL_GetError());
return -1;
}
// prepare variables
// decoding
const char *drone_addr = "http://192.168.1.1:5555"; // network address of the drone's video stream
AVFormatContext *pFormatCtx = NULL;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVPacket packet;
AVFrame *pFrame;
int terminate, frameDecoded;
// converting
AVFrame *pFrame_YUV420P;
uint8_t *buffer_YUV420P;
struct SwsContext *pConvertCtx_YUV420P;
AVFrame *pFrame_BGR24;
uint8_t *buffer_BGR24;
struct SwsContext *pConvertCtx_BGR24;
// displaying
SDL_Window *pWindow1;
SDL_Renderer *pRenderer1;
SDL_Texture *bmpTex1;
uint8_t *pixels1;
int pitch1, size1;
SDL_Window *pWindow2;
SDL_Renderer *pRenderer2;
SDL_Texture *bmpTex2;
uint8_t *pixels2;
int pitch2, size2;
// SDL event handling
SDL_Event event;
// 1.1 Register all formats and codecs
av_register_all();
avcodec_register_all();
avformat_network_init();
// 1.2. Open the video stream (keep retrying until it can be reached)
while(avformat_open_input(&pFormatCtx, drone_addr, NULL, NULL) != 0)
printf("Could not open the video stream\nRetrying...\n");
// 1.3. Retrieve stream information
avformat_find_stream_info(pFormatCtx, NULL);
// Dump information about file to standard output
av_dump_format(pFormatCtx, 0, drone_addr, 0);
// 1.4. Get a pointer to the codec context for the video stream
// and find the decoder for the video stream
pCodecCtx = pFormatCtx->streams[0]->codec;
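// (this assumes the first stream is the video stream; a more robust version would search the
// streams for codec_type == AVMEDIA_TYPE_VIDEO)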
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
// 1.5. Open Codec
avcodec_open2(pCodecCtx, pCodec, NULL);
// 2.1.1. Prepare format conversion for displaying with SDL
// Allocate an AVFrame structure
pFrame_YUV420P = avcodec_alloc_frame();
if(pFrame_YUV420P == NULL) {
fprintf(stderr, "Could not allocate pFrame_YUV420P\n");
return -1;
}
// Determine required buffer size and allocate buffer
buffer_YUV420P = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height));
// Assign buffer to image planes
avpicture_fill((AVPicture *)pFrame_YUV420P, buffer_YUV420P,
PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
// format conversion context
pConvertCtx_YUV420P = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P,
SWS_SPLINE, NULL, NULL, NULL);
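// (SWS_SPLINE only selects the rescaling filter; source and destination sizes are identical here,
// so the context effectively performs a pure pixel-format conversion and a cheaper flag such as
// SWS_FAST_BILINEAR would work just as well; the same applies to the BGR24 context below)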
// 2.2.1. Prepare format conversion for OpenCV
// Allocate an AVFrame structure
pFrame_BGR24 = avcodec_alloc_frame();
if(pFrame_BGR24 == NULL) {
fprintf(stderr, "Could not allocate pFrame_YUV420P\n");
return -1;
}
// Determine required buffer size and allocate buffer
buffer_BGR24 = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_BGR24,
pCodecCtx->width, pCodecCtx->height));
// Assign buffer to image planes
avpicture_fill((AVPicture *)pFrame_BGR24, buffer_BGR24,
PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
// format conversion context
pConvertCtx_BGR24 = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, PIX_FMT_BGR24,
SWS_SPLINE, NULL, NULL, NULL);
// 3.1.1 prepare SDL for YUV
// allocate window, renderer, texture
pWindow1 = SDL_CreateWindow( "YUV", 0, 0, pCodecCtx->width, pCodecCtx->height, SDL_WINDOW_SHOWN);
pRenderer1 = SDL_CreateRenderer(pWindow1, -1, SDL_RENDERER_ACCELERATED);
bmpTex1 = SDL_CreateTexture(pRenderer1, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
size1 = pCodecCtx->width * pCodecCtx->height; // size of the Y plane; the U and V planes are size1/4 each
if(pWindow1==NULL || pRenderer1==NULL || bmpTex1==NULL) {
fprintf(stderr, "Could not open window1\n%s\n", SDL_GetError());
return -1;
}
// 3.2.1 prepare SDL for BGR
// allocate window, renderer, texture
pWindow2 = SDL_CreateWindow( "BGR", pCodecCtx->width+5, 0, pCodecCtx->width, pCodecCtx->height, SDL_WINDOW_SHOWN);
pRenderer2 = SDL_CreateRenderer(pWindow2, -1, SDL_RENDERER_ACCELERATED);
bmpTex2 = SDL_CreateTexture(pRenderer2, SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
size2 = pCodecCtx->width * pCodecCtx->height * 3; // 3 bytes per pixel for BGR24
if(pWindow2==NULL || pRenderer2==NULL || bmpTex2==NULL) {
fprintf(stderr, "Could not open window2\n%s\n", SDL_GetError());
return -1;
}
// 1.6. get video frames
pFrame = avcodec_alloc_frame();
terminate = 0;
while(!terminate) {
// read frame
if(av_read_frame(pFormatCtx, &packet)<0) {
fprintf(stderr, "Could not read frame!\n");
continue;
}
// decode the frame
if(avcodec_decode_video2(pCodecCtx, pFrame, &frameDecoded, &packet) < 0) {
fprintf(stderr, "Could not decode frame!\n");
av_free_packet(&packet);
continue;
}
if (frameDecoded) {
// 2.1.2. convert frame to YUV for Displaying
sws_scale(pConvertCtx_YUV420P, (const uint8_t * const*)pFrame->data, pFrame->linesize, 0,
pCodecCtx->height, pFrame_YUV420P->data, pFrame_YUV420P->linesize);
// 2.2.2. convert frame to BGR24 (the format expected when handing frames to OpenCV)
sws_scale(pConvertCtx_BGR24, (const uint8_t * const*)pFrame->data, pFrame->linesize, 0,
pCodecCtx->height, pFrame_BGR24->data, pFrame_BGR24->linesize);
// 3.1.2. copy converted YUV to the SDL 2.0 texture
// SDL_PIXELFORMAT_YV12 stores the planes as Y, then V, then U, so data[2] (V) is copied before data[1] (U)
SDL_LockTexture(bmpTex1, NULL, (void **)&pixels1, &pitch1);
memcpy(pixels1, pFrame_YUV420P->data[0], size1);
memcpy(pixels1 + size1, pFrame_YUV420P->data[2], size1/4);
memcpy(pixels1 + size1*5/4, pFrame_YUV420P->data[1], size1/4);
SDL_UnlockTexture(bmpTex1); // unlocking uploads the staged pixels; a separate SDL_UpdateTexture is not needed
// refresh screen
SDL_RenderClear(pRenderer1);
SDL_RenderCopy(pRenderer1, bmpTex1, NULL, NULL);
SDL_RenderPresent(pRenderer1);
// 3.2.2. copy converted BGR to the SDL 2.0 texture
// (assumes pitch2 == width*3, i.e. neither the texture nor the BGR frame has row padding)
SDL_LockTexture(bmpTex2, NULL, (void **)&pixels2, &pitch2);
memcpy(pixels2, pFrame_BGR24->data[0], size2);
SDL_UnlockTexture(bmpTex2); // again, unlocking uploads the pixels
// refresh screen
SDL_RenderClear(pRenderer2);
SDL_RenderCopy(pRenderer2, bmpTex2, NULL, NULL);
SDL_RenderPresent(pRenderer2);
}
// free the packet that was allocated by av_read_frame (otherwise every iteration leaks memory)
av_free_packet(&packet);
// exit on any key press (check SDL_PollEvent's return value so an uninitialized event is never read)
if (SDL_PollEvent(&event) && event.type == SDL_KEYDOWN) {
terminate = 1;
}
}
// release
// Note: the SDL objects must be destroyed before the codec is closed,
// otherwise closing the codec causes a segmentation fault.
SDL_DestroyTexture(bmpTex1);
SDL_DestroyTexture(bmpTex2);
SDL_DestroyRenderer(pRenderer1);
SDL_DestroyRenderer(pRenderer2);
SDL_DestroyWindow(pWindow1);
SDL_DestroyWindow(pWindow2);
av_free(pFrame_YUV420P);
av_free(buffer_YUV420P);
sws_freeContext(pConvertCtx_YUV420P);
av_free(pFrame_BGR24);
av_free(buffer_BGR24);
sws_freeContext(pConvertCtx_BGR24);
av_free(pFrame);
avcodec_close(pCodecCtx); // everything allocated after the codec context must already be freed at this point
avformat_close_input(&pFormatCtx);
SDL_Quit();
return 0;
}
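The comments above prepare a BGR24 copy of each frame "for OpenCV", but the example never actually hands it over. As a minimal, hypothetical sketch (assuming the OpenCV C++ interface is installed and added to the g++ build, e.g. via pkg-config), the converted buffer could be wrapped in a cv::Mat without copying, right after the second sws_scale() call:

#include <opencv2/opencv.hpp> // add at the top of the file for this sketch (requires the g++ build)

// ... inside the if (frameDecoded) block, after sws_scale() has filled pFrame_BGR24:
cv::Mat img(pCodecCtx->height, pCodecCtx->width, CV_8UC3,
pFrame_BGR24->data[0], pFrame_BGR24->linesize[0]); // wraps the existing buffer, no copy
cv::imshow("OpenCV BGR", img); // OpenCV expects BGR byte order, which matches PIX_FMT_BGR24
cv::waitKey(1);                // lets HighGUI process its window events

Since cv::Mat only references the buffer here, any processing that outlives the current frame should clone() the image first.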