步骤如下:

1. 下载

官网永远是王道,呵呵:http://ffmpeg.org/download.html

或者 svn checkout svn://svn.mplayerhq.hu/ffmpeg/trunk ffmpeg

2. 编译

    • 运行./configure
      很不幸,运行configure后出现了错误提示:
      yasm not found, use --disable-yasm for a crippled build

      解决方案:sudo apt-get install yasm
      重新./configure,搞定

    • make

    • make install
      权限不够需要前面加上sudo

    • 编译源码:一定注意加载库的顺序.

    • 参考代码:
    • #include <SDL/SDL.h>
      
      #include <libavcodec/avcodec.h>
      #include <libavformat/avformat.h>
      #include <stdio.h>
      #include <libswscale/swscale.h> int main(int argc, char *argv[]) {
      AVFormatContext *pFormatCtx;
      int i, videoStream;
      AVCodecContext *pCodecCtx;
      AVCodec *pCodec;
      AVFrame *pFrame;
      AVFrame *pFrameYUV;
      AVPacket packet;
      int frameFinished;
      int numBytes; // Register all formats and codecs
      av_register_all();
      // Open video file
      if (av_open_input_file(&pFormatCtx, "/home/user/workspace/panda/media/video/4f5a9c384d94eb21e5273ec263457535.mp4", NULL, , NULL )
      != ) {
      printf("=== cannot open file\n===");
      return -; // Couldn't open file
      }
      // Retrieve stream information
      if (av_find_stream_info(pFormatCtx) < )
      return -; // Couldn't find stream information
      // Dump information about file onto standard error
      // dump_format(pFormatCtx, 0, argv[1], false);
      // Find the first video stream
      videoStream = -;
      for (i = ; i < pFormatCtx->nb_streams; i++)
      if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) ////////
      {
      videoStream = i;
      break;
      }
      if (videoStream == -)
      return -; // Didn't find a video stream
      // Get a pointer to the codec context for the video stream
      pCodecCtx = pFormatCtx->streams[videoStream]->codec; //////////
      ///////// SDL initialization
      SDL_Surface *screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, , SDL_HWSURFACE);
      SDL_Overlay *overlay = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
      static SDL_Rect rect;
      rect.x = ;
      rect.y = ;
      rect.w = pCodecCtx->width;
      rect.h = pCodecCtx->height;
      //////////
      // Find the decoder for the video stream
      pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
      if (pCodec == NULL )
      return -; // Codec not found
      // Open codec
      if (avcodec_open(pCodecCtx, pCodec) < )
      return -; // Could not open codec // Allocate video frame
      pFrame = avcodec_alloc_frame();
      // Allocate an AVFrame structure
      pFrameYUV = avcodec_alloc_frame();
      if (pFrameYUV == NULL )
      return -; static struct SwsContext *img_convert_ctx; img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
      // PIX_FMT_RGB24,
      PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL ); // Set SDL events
      SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
      SDL_EventState(SDL_MOUSEMOTION, SDL_IGNORE);
      SDL_ShowCursor(SDL_DISABLE); // Read frames
      while ((av_read_frame(pFormatCtx, &packet) >= ) && (SDL_PollEvent(NULL ) == )) {
      // Is this a packet from the video stream?
      if (packet.stream_index == videoStream) {
      // Decode video frame
      avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size);
      // Did we get a video frame?
      if (frameFinished) {
      // Convert the image from its native format to YUV, and display SDL_LockYUVOverlay(overlay);
      pFrameYUV->data[] = overlay->pixels[];
      pFrameYUV->data[] = overlay->pixels[];
      pFrameYUV->data[] = overlay->pixels[]; pFrameYUV->linesize[] = overlay->pitches[];
      pFrameYUV->linesize[] = overlay->pitches[];
      pFrameYUV->linesize[] = overlay->pitches[]; // img_convert((AVPicture *) pFrameYUV, PIX_FMT_YUV420P, (AVPicture *) pFrame, pCodecCtx->pix_fmt, pCodecCtx->width,
      // pCodecCtx->height); // other codes
      // Convert the image from its native format to RGB sws_scale(img_convert_ctx, (const uint8_t* const *) pFrame->data, pFrame->linesize, , pCodecCtx->height, pFrameYUV->data,
      pFrameYUV->linesize); SDL_UnlockYUVOverlay(overlay);
      SDL_DisplayYUVOverlay(overlay, &rect);
      ///
      SDL_Delay();
      }
      }
      // Free the packet that was allocated by av_read_frame
      av_free_packet(&packet);
      }
      // Free the RGB image
      av_free(pFrameYUV);
      // Free the YUV frame
      av_free(pFrame);
      // Close the codec
      avcodec_close(pCodecCtx);
      // Close the video file
      av_close_input_file(pFormatCtx);
      //
      SDL_FreeYUVOverlay(overlay);
      return ;
      }
    • 放大播放:
    • #include <SDL/SDL.h>
      
      #include <libavcodec/avcodec.h>
      #include <libavformat/avformat.h>
      #include <stdio.h>
      #include <libswscale/swscale.h> int avcodec_main(int argc, char *argv[]) {
      AVFormatContext *pFormatCtx;
      int i, videoStream;
      AVCodecContext *pCodecCtx;
      AVCodec *pCodec;
      AVFrame *pFrame;
      AVFrame *pFrameYUV;
      AVPacket packet;
      int frameFinished;
      int numBytes; // Register all formats and codecs
      av_register_all();
      // Open video file
      if (av_open_input_file(&pFormatCtx, "/home/user/workspace/panda/media/video/4f5a9c384d94eb21e5273ec263457535.mp4", NULL, 0, NULL )
      != 0) {
      printf("=== cannot open file\n===");
      return -1; // Couldn't open file
      }
      // Retrieve stream information
      if (av_find_stream_info(pFormatCtx) < 0)
      return -1; // Couldn't find stream information
      // Dump information about file onto standard error
      // dump_format(pFormatCtx, 0, argv[1], false);
      // Find the first video stream
      videoStream = -1;
      for (i = 0; i < pFormatCtx->nb_streams; i++)
      if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) ////////
      {
      videoStream = i;
      break;
      }
      if (videoStream == -1)
      return -1; // Didn't find a video stream
      // Get a pointer to the codec context for the video stream
      pCodecCtx = pFormatCtx->streams[videoStream]->codec; ////////// ///////// SDL initialization
      int w = 1920, h = 1080; SDL_Surface *screen = SDL_SetVideoMode(w, h, 0, SDL_HWSURFACE);
      SDL_Overlay *overlay = SDL_CreateYUVOverlay(w, h, SDL_YV12_OVERLAY, screen);
      static SDL_Rect rect;
      rect.x = 0;
      rect.y = 0;
      rect.w = w;
      rect.h = h;
      //////////
      // Find the decoder for the video stream
      pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
      if (pCodec == NULL )
      return -1; // Codec not found
      // Open codec
      if (avcodec_open(pCodecCtx, pCodec) < 0)
      return -1; // Could not open codec // Allocate video frame
      pFrame = avcodec_alloc_frame();
      // Allocate an AVFrame structure
      pFrameYUV = avcodec_alloc_frame();
      if (pFrameYUV == NULL )
      return -1; static struct SwsContext *img_convert_ctx; img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, w, h,
      // PIX_FMT_RGB24,
      PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL ); // Set SDL events
      SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
      SDL_EventState(SDL_MOUSEMOTION, SDL_IGNORE);
      SDL_ShowCursor(SDL_DISABLE); // Read frames
      while ((av_read_frame(pFormatCtx, &packet) >= 0) && (SDL_PollEvent(NULL ) == 0)) {
      // Is this a packet from the video stream?
      if (packet.stream_index == videoStream) {
      // Decode video frame
      avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size);
      // Did we get a video frame?
      if (frameFinished) {
      // Convert the image from its native format to YUV, and display SDL_LockYUVOverlay(overlay);
      pFrameYUV->data[0] = overlay->pixels[0];
      pFrameYUV->data[1] = overlay->pixels[2];
      pFrameYUV->data[2] = overlay->pixels[1]; pFrameYUV->linesize[0] = overlay->pitches[0];
      pFrameYUV->linesize[1] = overlay->pitches[2];
      pFrameYUV->linesize[2] = overlay->pitches[1]; // img_convert((AVPicture *) pFrameYUV, PIX_FMT_YUV420P, (AVPicture *) pFrame, pCodecCtx->pix_fmt, pCodecCtx->width,
      // pCodecCtx->height); // other codes
      // Convert the image from its native format to RGB sws_scale(img_convert_ctx, (const uint8_t* const *) pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
      pFrameYUV->linesize); SDL_UnlockYUVOverlay(overlay);
      SDL_DisplayYUVOverlay(overlay, &rect);
      ///
      SDL_Delay(30);
      }
      }
      // Free the packet that was allocated by av_read_frame
      av_free_packet(&packet);
      }
      // Free the RGB image
      av_free(pFrameYUV);
      // Free the YUV frame
      av_free(pFrame);
      // Close the codec
      avcodec_close(pCodecCtx);
      // Close the video file
      av_close_input_file(pFormatCtx);
      //
      SDL_FreeYUVOverlay(overlay);
      return 0;
      }

        

    • 参考http://hi.baidu.com/xiaomeng008/archive/tag/ffmpeg

    • ffmpeg: http://blog.csdn.net/byxdaz/article/details/7316304

      ffmpeg编译和使用大全     http://lvzun.iteye.com/blog/706121

    • 重点推荐:http://dranger.com/ffmpeg/ An ffmpeg and SDL Tutorial
    • http://www.libsdl.org/release/SDL-1.2.15/test/  SDL官方示例。 overlay有rgb转换到YUV.

ffmpeg 在ubuntu上编译环境搭建和开发的更多相关文章

  1. ubuntu上lamp环境搭建

    首先,介绍个彻底删除linux已经安装的软件的方法. sudo apt-get purge mysql-server mysql-client mysql-common mysql-server-5. ...

  2. Ubuntu下qemu环境搭建vexpress开发平台

    在查找资料过程中,发现自己搭建虚拟的arm环境的话,有一个比较好的软件就是qemu了,当然还有其他的,大家各投所好就好. 接下来说一下qemu环境搭建过程. 其实搭建很简单,作为小白,我还是捣鼓了两三 ...

  3. RabbitMQ系列(一)RabbitMQ在Ubuntu上的环境搭建

    环境配置 Ubuntu Server 18.04 RabbitMQ 3.6.10 安装之前 我们使用apt-get进行RabbitMQ安装,在安装之前,强烈建议您把apt源换位国内,大大增加下载安装的 ...

  4. Ubuntu上CUDA环境搭建

    1.下载CUDA:https://developer.nvidia.com/cuda-toolkit-archive (如果已经安装了N卡驱动,最好用.deb,如果没有安装,可以用.run) 2.根据 ...

  5. RabbitMQ在Ubuntu上的环境搭建

    1.修改/etc/apt/sources.list文件 A:命令:vi /etc/apt/sources.list B:在最后一行加上:deb http://www.rabbitmq.com/debi ...

  6. Ubuntu Desktop开发生产环境搭建

    Ubuntu Desktop开发生产环境搭建 1   开发生产环境搭建 在本节内容开始前,先定义一下使用场合,没有哪种系统或者设备是万能的,都有它的优点和缺点,能够在具体的使用场景,根据自身的需求来取 ...

  7. ubuntu12.04下安卓编译环境搭建总结

    前言:      因为工作需要,经常要编译安卓下的动态库,公司有已经搭建好环境的服务器,但是第一自己想自己搭建一下了解一个整个过程,另外,公司的服务器也经常出现问 题,导致编译不了,所以就想自己搭建环 ...

  8. Tiny4412 开发板 编译环境搭建【转】

    本文转载自:http://blog.csdn.net/beijiwei/article/details/51055369 版权声明:本文为博主原创文章,未经博主允许不得转载. /*********** ...

  9. u-boot 移植(一)编译环境搭建

    u-boot 移植(一)编译环境搭建 soc:s3c2440 board:jz2440 uboot:u-boot-2016.11 toolchain:gcc-linaro-7.4.1-2019.02- ...

随机推荐

  1. bootstrap datetimepicker 格式化yyyymmdd时,无法读取yyyymmdd格式

    不知为何,java程序员爱用yyyymmdd格式化日期?导致bootstrap datetimepicker无法解析正确的日期 发现js中yyyymmdd不是正常能够解析的日期 查看datetimep ...

  2. 在线排错之curl命令详解

    春回大地万物复苏,好久不来,向各位博友问好. 简介 cURL是一个利用URL语法在命令行下工作的文件传输工具,1997年首次发行.它支持文件上传和下载,所以是综合传输工具,但按传统,习惯称cURL为下 ...

  3. 【亲测有效】Centos安装完成docker后启动docker报错docker: unrecognized service的两种解决方案

    今天在学习Docker的时候 使用yum install docker安装完后启动不了,报错如下: [root@Sakura ~]# service docker start docker: unre ...

  4. OC与JS的交互(iOS与H5混编)

    大神总结WKWebView的坑:https://mp.weixin.qq.com/s/rhYKLIbXOsUJC_n6dt9UfA 在开发过程中,经常会出现需要iOS移动端与H5混编的使用场景. iO ...

  5. B. Views Matter

    链接 [http://codeforces.com/contest/1061/problem/B] 题意 问你最多去掉多少块使得从上和右看,投影图不变 分析 注意细节,尤其第一列 代码 #includ ...

  6. 终于做完了这个pj

    首先要说这个博客网站实在是功能太弱!不知道为什么还要每次写博客.直接交作业不好吗- -b 1.估计时间: 看见这个任务就觉得很难啊,估计装vs2012就得半天,然后上学期选修的c++基本上都忘光了,本 ...

  7. 软件工程附加篇章:进阶四则运算和Core对接

    0x01 :计算模块(Core)和前端对接 首先特别结对编程刘乾组(SivilTaram)提供的计算模块(Core),http://www.cnblogs.com/SivilTaram/p/48599 ...

  8. BugPhobia开发篇章:Beta阶段第VIII次Scrum Meeting

    0x01 :Scrum Meeting基本摘要 Beta阶段第八次Scrum Meeting 敏捷开发起始时间 2015/12/22 00:00 A.M. 敏捷开发终止时间 2015/12/22 23 ...

  9. Linux内核分析 读书笔记 (第五章)

    第五章 系统调用 5.1 与内核通信 1.调用在用户空间进程和硬件设备之间添加了一个中间层.该层主要作用有三个: 为用户空间提供了硬件的抽象接口. 系统调用保证了系统的稳定和安全. 实现多任务和虚拟内 ...

  10. ChangeSort

    package com.home.test; import java.util.Arrays; public class ChangeSort { public String[] changeLoca ...