Android Local Video Player Development -- Video Decoding
In the previous chapter, Android本地视频播放器开发--SDL编译 (building SDL), we compiled the SDL support library. At the time we used SDL 2.0, but several of its APIs have since changed, so from here on we use the SDL 1.3 library instead; I will upload both its source and the compiled binaries. In this chapter we use FFmpeg to decode the video frames of a local file and SDL to display them.
1. Decodec_Video.c -- this is my video decoding source file; its contents are as follows:
[cpp]
 #include <stdio.h>  
 #include <android/log.h>  
  
 #ifdef __MINGW32__  
 #undef main /* Prevents SDL from overriding main() */  
 #endif  
  
 #include "../SDL/include/SDL.h"  
 #include "../SDL/include/SDL_thread.h"  
  
 #include "VideoPlayerDecode.h"  
 #include "../ffmpeg/libavutil/avutil.h"  
 #include "../ffmpeg/libavcodec/avcodec.h"  
 #include "../ffmpeg/libavformat/avformat.h"  
 #include "../ffmpeg/libswscale/swscale.h"  
  
 AVFormatContext *pFormatCtx; 
 int             i, videoStream; 
 AVCodecContext  *pCodecCtx; 
 AVCodec         *pCodec; 
 AVFrame         *pFrame; 
 AVPacket        packet; 
 int             frameFinished; 
 float           aspect_ratio; 
  
 static struct SwsContext *img_convert_ctx; 
 SDL_Surface     *screen; 
 SDL_Overlay *bmp; 
 SDL_Rect        rect; 
 SDL_Event       event; 
  
  
 JNIEXPORT jint JNICALL Java_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode_VideoPlayer 
 (JNIEnv *env, jclass clz, jstring fileName) 
 { 
     const char* local_title = (*env)->GetStringUTFChars(env, fileName, NULL); 
     av_register_all(); // register all supported container formats and codecs
     if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { 
         fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); 
         exit(1); 
     } 
     if(avformat_open_input(&pFormatCtx, local_title, NULL, NULL) != 0) 
                 return -1; 
     if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 
                 return -1; 
     av_dump_format(pFormatCtx, -1, local_title, 0); 
     videoStream=-1; 
     for(i=0; i<pFormatCtx->nb_streams; i++) 
         if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { 
             videoStream=i; 
             break; 
         } 
     if(videoStream==-1) 
         return -1; // Didn't find a video stream  
     // Get a pointer to the codec context for the video stream  
     pCodecCtx=pFormatCtx->streams[videoStream]->codec; 
  
     // Find the decoder for the video stream  
     pCodec=avcodec_find_decoder(pCodecCtx->codec_id); 
     if(pCodec==NULL) { 
         fprintf(stderr, "Unsupported codec!\n"); 
         return -1; // Codec not found  
     } 
     if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0)return -1; 
     pFrame = avcodec_alloc_frame(); 
     if(pFrame == NULL)return -1; 
     // Make a screen to put our video  
 #ifndef __DARWIN__  
     screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); 
 #else  
     screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); 
 #endif  
     if(!screen) { 
         fprintf(stderr, "SDL: could not set video mode - exiting\n"); 
         exit(1); 
     } 
     // Allocate a place to put our YUV image on that screen  
     bmp = SDL_CreateYUVOverlay(pCodecCtx->width, 
             pCodecCtx->height, 
             SDL_YV12_OVERLAY, 
             screen); 
     // The overlay created above is YV12, so convert decoded frames to YUV420P
     // (not RGB24) to match the plane layout written into the overlay below.
     img_convert_ctx = sws_getContext(pCodecCtx->width,
                           pCodecCtx->height, pCodecCtx->pix_fmt,
                           pCodecCtx->width, pCodecCtx->height,
                           PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
  
     // Read packets, decode them, and display each finished frame in the SDL overlay
     i=0; 
     while(av_read_frame(pFormatCtx, &packet)>=0) { 
         // Is this a packet from the video stream?  
         if(packet.stream_index==videoStream) { 
             avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 
             // Did we get a video frame?  
             if(frameFinished) { 
                 SDL_LockYUVOverlay(bmp); 
                  
                AVPicture pict;                // describes the destination planes inside the SDL overlay
                pict.data[0] = bmp->pixels[0];
                pict.data[1] = bmp->pixels[2]; // SDL YV12 stores the V plane before U, so swap the chroma planes
                pict.data[2] = bmp->pixels[1];

                pict.linesize[0] = bmp->pitches[0];
                pict.linesize[1] = bmp->pitches[2];
                pict.linesize[2] = bmp->pitches[1];

                sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize);
                 SDL_UnlockYUVOverlay(bmp); 
  
                 rect.x = 0; 
                 rect.y = 0; 
                 rect.w = pCodecCtx->width; 
                 rect.h = pCodecCtx->height; 
                 SDL_DisplayYUVOverlay(bmp, &rect); 
  
             } 
         } 
         // Free the packet that was allocated by av_read_frame  
         av_free_packet(&packet); 
         SDL_PollEvent(&event); 
         switch(event.type) { 
             case SDL_QUIT: 
                 SDL_Quit(); 
                 exit(0); 
                 break; 
             default: 
                 break; 
         } 
  
     } 
     // Free the YUV frame  
     av_free(pFrame); 
  
     // Close the codec  
     avcodec_close(pCodecCtx); 
  
     // Close the video file  
     av_close_input_file(pFormatCtx);

     // Release the UTF-8 string obtained from the Java side and return
     (*env)->ReleaseStringUTFChars(env, fileName, local_title);
     return 0;
 }
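For reference, the VideoPlayerDecode.h header included above is not listed in the post. Below is a minimal sketch of what it would contain, assuming it was generated by javah for the class com.zhangjie.graduation.videopalyer.jni.VideoPlayerDecode; only the exported symbol name is taken from the code above, the rest is my assumption:
[cpp]
 /* VideoPlayerDecode.h -- sketch of the JNI header matching the exported function above */
 #include <jni.h>

 #ifndef _Included_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode
 #define _Included_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode
 #ifdef __cplusplus
 extern "C" {
 #endif

 /* Called from Java with the path of the local video file to play */
 JNIEXPORT jint JNICALL Java_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode_VideoPlayer
   (JNIEnv *, jclass, jstring);

 #ifdef __cplusplus
 }
 #endif
 #endif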
2. The build output is as follows:
[cpp]
 root@zhangjie:/Graduation/jni# ndk-build 
 Install        : libSDL.so => libs/armeabi/libSDL.so 
 Install        : libffmpeg-neon.so => libs/armeabi/libffmpeg-neon.so 
 Compile arm    : ffmpeg-test-neon <= Decodec_Video.c 
 /Graduation/jni/jniffmpeg/Decodec_Video.c: In function 'Java_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode_VideoPlayer': 
 /Graduation/jni/jniffmpeg/Decodec_Video.c:106:1: warning: passing argument 2 of 'sws_scale' from incompatible pointer type [enabled by default] 
 /Graduation/jni/jniffmpeg/../ffmpeg/libswscale/swscale.h:237:5: note: expected 'uint8_t const * const*' but argument is of type 'uint8_t **' 
 /Graduation/jni/jniffmpeg/Decodec_Video.c:137:2: warning: 'av_close_input_file' is deprecated (declared at /Graduation/jni/jniffmpeg/../ffmpeg/libavformat/avformat.h:1533) [-Wdeprecated-declarations] 
 SharedLibrary  : libffmpeg-test-neon.so 
 Install        : libffmpeg-test-neon.so => libs/armeabi/libffmpeg-test-neon.so
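Both warnings are harmless for now, but they point at things worth cleaning up later. A small sketch of the fixes (my own suggestion, not part of the original build): sws_scale expects the source planes as 'const uint8_t * const *', so an explicit cast silences the first warning, and the deprecated av_close_input_file can be replaced by avformat_close_input:
[cpp]
 // In Decodec_Video.c -- silence the sws_scale warning with an explicit cast:
 sws_scale(img_convert_ctx, (const uint8_t * const *)pFrame->data,
           pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize);

 // ...and replace the deprecated call at the end of the function:
 avformat_close_input(&pFormatCtx);  // closes the file and resets pFormatCtx to NULL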
3. SDL 1.3 source code
4. The FFmpeg build from the earlier chapter, Android本地视频播放器开发--NDK编译FFmpeg, did not enable the swscale component, so FFmpeg has to be rebuilt. The build script is as follows:
[plain]
 NDK=/opt/android-ndk-r8d 
 PLATFORM=$NDK/platforms/android-8/arch-arm/ 
 PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.4.3/prebuilt/linux-x86 
 LOCAL_ARM_NEON=true 
 CPU=armv7-a 
 OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=neon -marm -mcpu=cortex-a8" 
 PREFIX=./android/$CPU 
 ./configure --target-os=linux \ 
     --prefix=$PREFIX \ 
     --enable-cross-compile \ 
     --arch=arm \ 
     --enable-nonfree \ 
     --enable-asm \ 
     --cpu=cortex-a8 \ 
     --enable-neon \ 
     --cc=$PREBUILT/bin/arm-linux-androideabi-gcc \ 
     --cross-prefix=$PREBUILT/bin/arm-linux-androideabi- \ 
     --nm=$PREBUILT/bin/arm-linux-androideabi-nm \ 
     --sysroot=$PLATFORM \ 
     --extra-cflags=" -O3 -fpic -DANDROID -DHAVE_SYS_UIO_H=1 $OPTIMIZE_CFLAGS " \ 
     --disable-shared \ 
     --enable-static \ 
     --extra-ldflags="-Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib  -nostdlib -lc -lm -ldl -llog" \ 
     --disable-ffmpeg \ 
     --disable-ffplay \ 
     --disable-ffprobe \ 
     --disable-ffserver \ 
     --disable-encoders \ 
     --enable-avformat \ 
     --disable-optimizations \ 
     --disable-doc \ 
     --enable-pthreads \ 
     --disable-yasm \ 
     --enable-zlib \ 
     --enable-pic \ 
     --enable-small 
  
 #make clean 
 make  -j4 install 
  
 $PREBUILT/bin/arm-linux-androideabi-ar d libavcodec/libavcodec.a inverse.o 
  
 $PREBUILT/bin/arm-linux-androideabi-ld -rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib  -soname libffmpeg-neon.so -shared -nostdlib  -z noexecstack -Bsymbolic --whole-archive --no-undefined -o $PREFIX/libffmpeg-neon.so libavcodec/libavcodec.a libavformat/libavformat.a libavutil/libavutil.a  libavfilter/libavfilter.a libswresample/libswresample.a libswscale/libswscale.a libavdevice/libavdevice.a -lc -lm -lz -ldl -llog  --warn-once  --dynamic-linker=/system/bin/linker $PREBUILT/lib/gcc/arm-linux-androideabi/4.4.3/libgcc.a
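After the rebuild it is worth checking that libswscale really ended up inside libffmpeg-neon.so before relying on it from the player. A minimal sketch of such a check (my own test code, not part of the original project; the include path mirrors the one used in Decodec_Video.c):
[cpp]
 #include <android/log.h>
 #include "../ffmpeg/libswscale/swscale.h"

 /* Logs the swscale version that was linked in; if swscale is missing,
    this translation unit fails to link against libffmpeg-neon.so. */
 void check_swscale(void)
 {
     __android_log_print(ANDROID_LOG_INFO, "ffmpeg-neon",
                         "libswscale version: %u", swscale_version());
 }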