【Android】Real-time Camera data capture on Android and encoding it with MediaCodec hardware encoding
/*
 * The overall flow — grabbing frames, encoding them, and handling the
 * encoded output — looks roughly like this:
 */
/* 1. Grab the raw frame */
@Override
public void onPreviewFrame(byte[] onPreviewData, Camera camera) {
    /* Here you can rotate or scale onPreviewData,
     * or convert between YUV formats, e.g. YUV420P (YV12) and YUV420SP (NV21/NV12);
     * open-source libraries such as libyuv or FFmpeg can be used for this.
     */
    getRawFrame(onPreviewData);
    /* Then hand onPreviewData back to the Camera callback queue */
    camera.addCallbackBuffer(onPreviewData);
}
private void getRawFrame(byte[] rawFrame) { encodeFrame(rawFrame); }
/* 2. Encode */
private byte[] encodeFrame(byte[] inputData) { return encodedData; }
/* 3. With the encoded data in hand you can do what you need: save it to a local file, or push it to a stream */
Operation ? Send(byte[] sendData) : Save(byte[] saveData)
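To make step 1 concrete, here is a minimal sketch of wiring up a buffered preview callback. Note the openCamera code later in this post uses setPreviewCallback(this); the setPreviewCallbackWithBuffer variant shown here is what the addCallbackBuffer call in the pseudocode implies, and it avoids allocating a new byte[] per frame. The previewWidth/previewHeight values are placeholders, and the buffer size math assumes NV21 (12 bits per pixel):
// A sketch, not part of the original code: reuse one preview buffer instead
// of letting the Camera allocate a fresh byte[] for every frame.
int previewWidth = 640, previewHeight = 480;            // assumed preview size
int bufferSize = previewWidth * previewHeight * 3 / 2;  // NV21: 12 bits per pixel
byte[] previewBuffer = new byte[bufferSize];

camera.addCallbackBuffer(previewBuffer);
camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        encodeFrame(data);              // hand the raw NV21 frame to the encoder
        camera.addCallbackBuffer(data); // recycle the buffer for the next frame
    }
});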
The onPreviewFrame used above is a callback interface of the Camera class. Before using the Camera, you need to initialize a SurfaceView and SurfaceHolder and implement the corresponding interfaces:
// init the preview surface
private void initView() {
    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.record_surface);
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    surfaceHolder.addCallback(this);
    surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    openCamera(holder); // open the camera once the surface exists
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    releaseCamera(); // remember to release the camera in surfaceDestroyed
}

private void openCamera(SurfaceHolder holder) {
    releaseCamera();
    try {
        camera = getCamera(Camera.CameraInfo.CAMERA_FACING_BACK); // choose the front/back camera as needed
    } catch (Exception e) {
        camera = null;
        if (AppContext.isDebugMode) {
            e.printStackTrace();
        }
    }
    if (camera != null) {
        try {
            camera.setPreviewCallback(this);
            camera.setDisplayOrientation(90); // official API that rotates only the on-screen preview; it does NOT affect the raw data delivered to onPreviewFrame
            if (parameters == null) {
                parameters = camera.getParameters();
            }
            parameters.setPreviewFormat(ImageFormat.NV21); // common formats: NV21 / YV12
            parameters.setPreviewSize(width, height); // many more camera parameters can be set, but first check that this camera actually supports the values, otherwise setParameters may fail (see the sketch below)
            camera.setParameters(parameters);
            camera.setPreviewDisplay(holder);
            camera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

@TargetApi(9)
private Camera getCamera(int cameraType) {
    Camera camera = null;
    try {
        camera = Camera.open(cameraType);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return camera; // returns null if camera is unavailable
}

private synchronized void releaseCamera() {
    if (camera != null) {
        try {
            camera.setPreviewCallback(null);
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            camera.stopPreview();
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            camera.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
        camera = null;
    }
}
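Regarding the "check support first" comment in openCamera: a minimal sketch of validating a requested preview size and format before applying it. getSupportedPreviewSizes and getSupportedPreviewFormats are standard Camera.Parameters calls; the helper name and the fallback policy are just one possible choice:
// A sketch: only apply a preview size/format the camera driver actually reports.
private void applySafePreviewConfig(Camera.Parameters parameters, int width, int height) {
    boolean sizeSupported = false;
    for (Camera.Size s : parameters.getSupportedPreviewSizes()) {
        if (s.width == width && s.height == height) {
            sizeSupported = true;
            break;
        }
    }
    if (sizeSupported) {
        parameters.setPreviewSize(width, height);
    } // else keep the driver's default preview size

    if (parameters.getSupportedPreviewFormats().contains(ImageFormat.NV21)) {
        parameters.setPreviewFormat(ImageFormat.NV21);
    }
}
openCamera could call applySafePreviewConfig(parameters, width, height) instead of setting the size and format unconditionally.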
The MediaCodec hardware-encoding part:
// video device.
private Camera camera;
private MediaCodec vencoder;
private MediaCodecInfo vmci;
private MediaCodec.BufferInfo vebi;
private byte[] vbuffer;
// video camera settings.
private Camera.Size vsize;
private int vcolor;
private int vbitrate_kbps = 300;
private final static int VFPS = 20;
private final static int VGOP = 5;
private final static int VWIDTH = 640;
private final static int VHEIGHT = 480;
private final static String VCODEC = "video/avc"; // MIME type used by chooseVideoEncoder below

/* First, the MediaCodec configuration needs to be initialized */
private void initMediaCodec() {
    // choose the right vencoder, prefer qcom then google.
    vcolor = chooseVideoEncoder();
    // vencoder yuv to 264 es stream.
    // requires sdk level 16+, Android 4.1, 4.1.1, the JELLY_BEAN
    try {
        vencoder = MediaCodec.createByCodecName(vmci.getName());
    } catch (IOException e) {
        Log.e(TAG, "create vencoder failed.");
        e.printStackTrace();
        return;
    }
    vebi = new MediaCodec.BufferInfo();
    // setup the vencoder.
    // @see https://developer.android.com/reference/android/media/MediaCodec.html
    MediaFormat vformat = MediaFormat.createVideoFormat(VCODEC, vsize.width, vsize.height);
    vformat.setInteger(MediaFormat.KEY_COLOR_FORMAT, vcolor);
    vformat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
    vformat.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * vbitrate_kbps);
    vformat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
    vformat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VGOP);
    Log.i(TAG, String.format("vencoder %s, color=%d, bitrate=%d, fps=%d, gop=%d, size=%dx%d",
            vmci.getName(), vcolor, vbitrate_kbps, VFPS, VGOP, vsize.width, vsize.height));
    // the following error can be ignored:
    // 1. the storeMetaDataInBuffers error:
    //    [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -2147483648
    //    @see http://bigflake.com/mediacodec/#q12
    vencoder.configure(vformat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    vencoder.start();
}

// for the vbuffer for YV12 (android YUV), @see below:
// https://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat(int)
// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
private int getYuvBuffer(int width, int height) {
    // stride = ALIGN(width, 16)
    int stride = (int) Math.ceil(width / 16.0) * 16;
    // y_size = stride * height
    int y_size = stride * height;
    // c_stride = ALIGN(stride/2, 16)
    int c_stride = (int) Math.ceil(width / 32.0) * 16;
    // c_size = c_stride * height/2
    int c_size = c_stride * height / 2;
    // size = y_size + c_size * 2
    return y_size + c_size * 2;
}
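For the 640x480 case above, getYuvBuffer works out to stride = 640, y_size = 307200, c_stride = 320, c_size = 76800, i.e. a 460800-byte buffer. The SRS excerpt does not include the per-frame encode call itself, so below is a minimal sketch of one synchronous pass in the buffer-array style (deprecated since API 21, but consistent with the "sdk level 16+" note in initMediaCodec). It assumes the vencoder and vebi fields from above; onEncodedFrame is a hypothetical placeholder for whatever sends or saves the output:
// A sketch: one synchronous encode pass using the (deprecated) buffer arrays.
// inputBuffers/outputBuffers should be fetched once, right after vencoder.start().
private ByteBuffer[] inputBuffers;  // = vencoder.getInputBuffers();
private ByteBuffer[] outputBuffers; // = vencoder.getOutputBuffers();

private void encodeFrame(byte[] yuvFrame, long ptsUs) {
    int inIndex = vencoder.dequeueInputBuffer(-1); // block until an input buffer is free
    if (inIndex >= 0) {
        ByteBuffer bb = inputBuffers[inIndex];
        bb.clear();
        bb.put(yuvFrame, 0, yuvFrame.length); // frame must already be in the chosen color format
        vencoder.queueInputBuffer(inIndex, 0, yuvFrame.length, ptsUs, 0);
    }
    for (;;) {
        int outIndex = vencoder.dequeueOutputBuffer(vebi, 0); // poll, don't block
        if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = vencoder.getOutputBuffers(); // re-fetch, per the official note later in this post
            continue;
        }
        if (outIndex < 0) {
            break; // nothing ready yet (TRY_AGAIN_LATER / FORMAT_CHANGED handled elsewhere)
        }
        ByteBuffer bb = outputBuffers[outIndex];
        byte[] encoded = new byte[vebi.size];
        bb.position(vebi.offset);
        bb.get(encoded, 0, vebi.size);
        onEncodedFrame(encoded, vebi); // hypothetical placeholder: send or save the H.264 data
        vencoder.releaseOutputBuffer(outIndex, false);
    }
}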
// choose the video encoder by name.
private MediaCodecInfo chooseVideoEncoder(String name, MediaCodecInfo def) {
    int nbCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < nbCodecs; i++) {
        MediaCodecInfo mci = MediaCodecList.getCodecInfoAt(i);
        if (!mci.isEncoder()) {
            continue;
        }
        String[] types = mci.getSupportedTypes();
        for (int j = 0; j < types.length; j++) {
            if (types[j].equalsIgnoreCase(VCODEC)) {
                //Log.i(TAG, String.format("vencoder %s types: %s", mci.getName(), types[j]));
                if (name == null) {
                    return mci;
                }
                if (mci.getName().contains(name)) {
                    return mci;
                }
            }
        }
    }
    return def;
}

// choose the right supported color format. @see below:
// https://developer.android.com/reference/android/media/MediaCodecInfo.html
// https://developer.android.com/reference/android/media/MediaCodecInfo.CodecCapabilities.html
private int chooseVideoEncoder() {
    // choose the encoder "video/avc":
    // 1. select one when type matched.
    // 2. prefer google avc.
    // 3. prefer qcom avc.
    vmci = chooseVideoEncoder(null, null);
    //vmci = chooseVideoEncoder("google", vmci);
    //vmci = chooseVideoEncoder("qcom", vmci);

    int matchedColorFormat = 0;
    MediaCodecInfo.CodecCapabilities cc = vmci.getCapabilitiesForType(VCODEC);
    for (int i = 0; i < cc.colorFormats.length; i++) {
        int cf = cc.colorFormats[i];
        Log.i(TAG, String.format("vencoder %s supports color format 0x%x(%d)", vmci.getName(), cf, cf));
        // choose YUV for h.264, prefer the bigger one.
        // corresponding to the color space transform in onPreviewFrame
        if (cf >= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                && cf <= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
            if (cf > matchedColorFormat) {
                matchedColorFormat = cf;
            }
        }
    }
    for (int i = 0; i < cc.profileLevels.length; i++) {
        MediaCodecInfo.CodecProfileLevel pl = cc.profileLevels[i];
        Log.i(TAG, String.format("vencoder %s support profile %d, level %d", vmci.getName(), pl.profile, pl.level));
    }
    Log.i(TAG, String.format("vencoder %s choose color format 0x%x(%d)", vmci.getName(), matchedColorFormat, matchedColorFormat));
    return matchedColorFormat;
}
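About the "color space transform in onPreviewFrame" mentioned in the comment above: the camera here delivers NV21, but the encoder may pick COLOR_FormatYUV420SemiPlanar (NV12) or COLOR_FormatYUV420Planar (I420). For NV12 the fix is just swapping each interleaved chroma byte pair; I420 additionally needs the chroma plane deinterleaved. A minimal pure-Java sketch of the NV12 case (SRS does this conversion elsewhere; this helper is only illustrative):
// A sketch: convert NV21 (Y plane + interleaved V/U) to NV12 (Y plane +
// interleaved U/V) in place by swapping each chroma byte pair.
private static void nv21ToNv12(byte[] frame, int width, int height) {
    int ySize = width * height;
    for (int i = ySize; i < frame.length; i += 2) {
        byte v = frame[i];
        frame[i] = frame[i + 1]; // U goes first in NV12
        frame[i + 1] = v;        // V second
    }
}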
The code above is taken from part of the SRS implementation and is for reference only.
I also recommend a project that saves the encoded output to a local .h264 file, which makes analysis easy. My fork: https://github.com/eterrao/MediaCodecEncodeH264.git
The original author's repo: https://github.com/sszhangpengfei/MediaCodecEncodeH264.git
(Thanks to everyone with an open-source, sharing spirit; because of you there were far fewer pitfalls on my path of learning and growth!)
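If you dump the stream yourself rather than using that project, one detail matters: the first buffer the encoder emits carries MediaCodec.BUFFER_FLAG_CODEC_CONFIG (the SPS/PPS) and must be written to the file too, or players cannot parse it. A sketch reusing the hypothetical onEncodedFrame placeholder from the encode sketch above (the output path is likewise hypothetical):
// A sketch: a playable raw .h264 file is simply every encoder output buffer
// written back-to-back, codec-config buffer included; the AVC encoder emits
// Annex-B NAL units with start codes already in place.
private FileOutputStream h264File; // opened elsewhere, e.g. new FileOutputStream("/sdcard/test.h264")

private void onEncodedFrame(byte[] encoded, MediaCodec.BufferInfo bi) {
    try {
        h264File.write(encoded);
    } catch (IOException e) {
        e.printStackTrace();
    }
    if ((bi.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        Log.i(TAG, "wrote codec config (SPS/PPS), " + encoded.length + " bytes");
    }
}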
In practice, MediaCodec implementations are all broadly similar, but note that the buffer-handling mechanism changed after API 20. The official guidance is as follows:
Link: developer.android.com/reference/android/media/MediaCodec.html
The following is excerpted from the official API documentation:
Processing Mode | API version <= 20 (Jelly Bean/KitKat) | API version >= 21 (Lollipop and later)
--- | --- | ---
Synchronous API using buffer arrays | Supported | Deprecated
Synchronous API using buffers | Not Available | Supported
Asynchronous API using buffers | Not Available | Supported
Since LOLLIPOP, the preferred method is to process data asynchronously by setting a callback before calling configure. Asynchronous mode changes the state transitions slightly, because you must call start() after flush() to transition the codec to the Running sub-state and start receiving input buffers. Similarly, upon an initial call to start the codec will move directly to the Running sub-state and start passing available input buffers via the callback.
MediaCodec is typically used like this in asynchronous mode:
MediaCodec codec = MediaCodec.createByCodecName(name);
MediaFormat mOutputFormat; // member variable
codec.setCallback(new MediaCodec.Callback() {
    @Override
    void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
        ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
        // fill inputBuffer with valid data
        …
        codec.queueInputBuffer(inputBufferId, …);
    }

    @Override
    void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
        ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
        MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
        // bufferFormat is equivalent to mOutputFormat
        // outputBuffer is ready to be processed or rendered.
        …
        codec.releaseOutputBuffer(outputBufferId, …);
    }

    @Override
    void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
        // Subsequent data will conform to new format.
        // Can ignore if using getOutputFormat(outputBufferId)
        mOutputFormat = format; // option B
    }

    @Override
    void onError(…) {
        …
    }
});
codec.configure(format, …);
mOutputFormat = codec.getOutputFormat(); // option B
codec.start();
// wait for processing to complete
codec.stop();
codec.release();
Synchronous Processing using Buffers
Since LOLLIPOP, you should retrieve input and output buffers using getInput/OutputBuffer(int) and/or getInput/OutputImage(int) even when using the codec in synchronous mode. This allows certain optimizations by the framework, e.g. when processing dynamic content. This optimization is disabled if you call getInput/OutputBuffers().
Note: do not mix the methods of using buffers and buffer arrays at the same time. Specifically, only call getInput/OutputBuffers directly after start() or after having dequeued an output buffer ID with the value of INFO_OUTPUT_FORMAT_CHANGED.
MediaCodec is typically used like this in synchronous mode:
MediaCodec codec = MediaCodec.createByCodecName(name);
codec.configure(format, …);
MediaFormat outputFormat = codec.getOutputFormat(); // option B
codec.start();
for (;;) {
    int inputBufferId = codec.dequeueInputBuffer(timeoutUs);
    if (inputBufferId >= 0) {
        ByteBuffer inputBuffer = codec.getInputBuffer(…);
        // fill inputBuffer with valid data
        …
        codec.queueInputBuffer(inputBufferId, …);
    }
    int outputBufferId = codec.dequeueOutputBuffer(…);
    if (outputBufferId >= 0) {
        ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
        MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
        // bufferFormat is identical to outputFormat
        // outputBuffer is ready to be processed or rendered.
        …
        codec.releaseOutputBuffer(outputBufferId, …);
    } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // Subsequent data will conform to new format.
        // Can ignore if using getOutputFormat(outputBufferId)
        outputFormat = codec.getOutputFormat(); // option B
    }
}
codec.stop();
codec.release();
Synchronous Processing using Buffer Arrays (deprecated)
In versions KITKAT_WATCH and before, the set of input and output buffers are represented by ByteBuffer[] arrays. After a successful call to start(), retrieve the buffer arrays using getInput/OutputBuffers(). Use the buffer IDs as indices into these arrays (when non-negative), as demonstrated in the sample below. Note that there is no inherent correlation between the size of the arrays and the number of input and output buffers used by the system, although the array size provides an upper bound.
MediaCodec codec = MediaCodec.createByCodecName(name);
codec.configure(format, …);
codec.start();
ByteBuffer[] inputBuffers = codec.getInputBuffers();
ByteBuffer[] outputBuffers = codec.getOutputBuffers();
for (;;) {
    int inputBufferId = codec.dequeueInputBuffer(…);
    if (inputBufferId >= 0) {
        // fill inputBuffers[inputBufferId] with valid data
        …
        codec.queueInputBuffer(inputBufferId, …);
    }
    int outputBufferId = codec.dequeueOutputBuffer(…);
    if (outputBufferId >= 0) {
        // outputBuffers[outputBufferId] is ready to be processed or rendered.
        …
        codec.releaseOutputBuffer(outputBufferId, …);
    } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        outputBuffers = codec.getOutputBuffers();
    } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // Subsequent data will conform to new format.
        MediaFormat format = codec.getOutputFormat();
    }
}
codec.stop();
codec.release();