#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <getopt.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <pthread.h>
#include <memoryAdapter.h>
#include <vencoder.h>

/* Hardware encoder handle and its base configuration, shared between
 * main() and H264EncodeOneFrame(). */
static VideoEncoder *gVideoEnc;
static VencBaseConfig baseConfig;

/* Capture resolution.
 * NOTE(review): the original numeric literals were lost in extraction;
 * 640x480 is a typical default for this board — confirm against the
 * sensor/driver configuration. */
static int gWidth = 640;
static int gHeight = 480;

/* Round x up to the given power-of-two boundary. */
#define ALIGN_4K(x)  (((x) + (4095)) & ~(4095))
#define ALIGN_1K(x)  (((x) + (1023)) & ~(1023))
#define ALIGN_32B(x) (((x) + (31)) & ~(31))
#define ALIGN_16B(x) (((x) + (15)) & ~(15))
#define ALIGN_8B(x)  (((x) + (7)) & ~(7))

/* Number of V4L2 capture buffers requested via VIDIOC_REQBUFS. */
#define REQ_COUNT 10

/* One mmap'ed V4L2 capture buffer plus the derived plane addresses. */
struct buffer
{
    void *start;    /* mmap'ed base address of the buffer            */
    size_t length;  /* buffer length reported by VIDIOC_QUERYBUF     */
    char *addrVirY; /* luma (Y) plane; equals start                  */
    char *addrVirC; /* chroma (VU) plane, after the 16B-aligned Y    */
};

static int fd = -1;            /* /dev/video0 file descriptor        */
struct buffer *buffers = NULL; /* array of req.count mapped buffers  */
struct v4l2_capability cap;
struct v4l2_format fmt;
struct v4l2_buffer buf[REQ_COUNT];
struct v4l2_requestbuffers req;
struct v4l2_buffer tmp_buf;
enum v4l2_buf_type type; int H264EncodeOneFrame(unsigned char *AddrVirY, unsigned char *AddrVirC, FILE *fpH264)
{
int result = ;
VencInputBuffer inputBuffer;
VencOutputBuffer outputBuffer;
int value;
unsigned int head_num = ;
VencHeaderData sps_pps_data; VencH264Param h264Param;
//* h264 param
h264Param.bEntropyCodingCABAC = ;
h264Param.nBitrate = * * ;
h264Param.nFramerate = ;
h264Param.nCodingMode = VENC_FRAME_CODING;
//h264Param.nCodingMode = VENC_FIELD_CODING;
h264Param.nMaxKeyInterval = ;
h264Param.sProfileLevel.nProfile = VENC_H264ProfileMain;
h264Param.sProfileLevel.nLevel = VENC_H264Level31;
h264Param.sQPRange.nMinqp = ;
h264Param.sQPRange.nMaxqp = ;
memset(&baseConfig, , sizeof(VencBaseConfig)); if (baseConfig.memops == NULL)
{
baseConfig.memops = MemAdapterGetOpsS();
if (baseConfig.memops == NULL)
{
printf("MemAdapterGetOpsS failed\n"); return -;
}
CdcMemOpen(baseConfig.memops);
} baseConfig.nInputWidth = gWidth;
baseConfig.nInputHeight = gHeight;
baseConfig.nStride = gWidth;
baseConfig.nDstWidth = gWidth;
baseConfig.nDstHeight = gHeight;
baseConfig.eInputFormat = VENC_PIXEL_YVU420SP; if (gVideoEnc == NULL)
{
printf("get SPS PPS\n");
gVideoEnc = VideoEncCreate((VENC_CODEC_TYPE)VENC_CODEC_H264);
VideoEncSetParameter(gVideoEnc, VENC_IndexParamH264Param, &h264Param);
value = ;
VideoEncSetParameter(gVideoEnc, VENC_IndexParamIfilter, &value);
value = ; //degree
VideoEncSetParameter(gVideoEnc, VENC_IndexParamRotation, &value);
value = ;
VideoEncSetParameter(gVideoEnc, VENC_IndexParamSetPSkip, &value);
VideoEncInit(gVideoEnc, &baseConfig);
}
VideoEncGetParameter(gVideoEnc, VENC_IndexParamH264SPSPPS, &sps_pps_data); fwrite(sps_pps_data.pBuffer, , sps_pps_data.nLength, fpH264); VencAllocateBufferParam bufferParam;
memset(&bufferParam, , sizeof(VencAllocateBufferParam));
memset(&inputBuffer, , sizeof(VencInputBuffer)); bufferParam.nSizeY = baseConfig.nInputWidth * baseConfig.nInputHeight;
bufferParam.nSizeC = baseConfig.nInputWidth * baseConfig.nInputHeight / ;
bufferParam.nBufferNum = ;
AllocInputBuffer(gVideoEnc, &bufferParam); GetOneAllocInputBuffer(gVideoEnc, &inputBuffer); memcpy(inputBuffer.pAddrVirY, AddrVirY, baseConfig.nInputWidth * baseConfig.nInputHeight);
memcpy(inputBuffer.pAddrVirC, AddrVirC, baseConfig.nInputWidth * baseConfig.nInputHeight / );
inputBuffer.bEnableCorp = ;
inputBuffer.sCropInfo.nLeft = ;
inputBuffer.sCropInfo.nTop = ;
inputBuffer.sCropInfo.nWidth = ;
inputBuffer.sCropInfo.nHeight = ;
FlushCacheAllocInputBuffer(gVideoEnc, &inputBuffer);
AddOneInputBuffer(gVideoEnc, &inputBuffer);
if (VENC_RESULT_OK != VideoEncodeOneFrame(gVideoEnc))
{
printf("VideoEncodeOneFrame failed.\n");
return -;
}
AlreadyUsedInputBuffer(gVideoEnc, &inputBuffer);
ReturnOneAllocInputBuffer(gVideoEnc, &inputBuffer); GetOneBitstreamFrame(gVideoEnc, &outputBuffer);
if (outputBuffer.nSize0 > )
{
printf("write pData0\n");
fwrite(outputBuffer.pData0, , outputBuffer.nSize0, fpH264);
}
if (outputBuffer.nSize1 > )
{
printf("write pData1\n");
fwrite(outputBuffer.pData1, , outputBuffer.nSize1, fpH264);
}
// outputBuffer.pData0;
// outputBuffer.nSize0;
// outputBuffer.pData1;
// outputBuffer.nSize1; FreeOneBitStreamFrame(gVideoEnc, &outputBuffer); if (baseConfig.memops != NULL)
{
CdcMemClose(baseConfig.memops);
baseConfig.memops = NULL;
}
VideoEncDestroy(gVideoEnc);
gVideoEnc = NULL; return ;
} int main(int argc, char **argv)
{
    struct v4l2_fmtdesc fmtd;
    int ret = 0;
    int index = 0; /* V4L2 input to select */
    struct v4l2_format fmt2;

    if ((fd = open("/dev/video0", O_RDWR | O_NONBLOCK, 0)) < 0)
    {
        printf("open video0 failed.\n");
        return -1;
    }

    /* Enumerate supported capture formats (results are discarded; the
     * loop just walks the driver's list until it runs out). */
    memset(&fmtd, 0, sizeof(fmtd));
    fmtd.index = 0;
    fmtd.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while ((ret = ioctl(fd, VIDIOC_ENUM_FMT, &fmtd)) == 0)
    {
        fmtd.index++;
    }

    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
    {
        printf("Error:VIDIOC_QUERYCAP\n");
        return -1;
    }
    if (ioctl(fd, VIDIOC_S_INPUT, &index) < 0)
    {
        printf("Error:VIDIOC_S_INPUT\n");
        return -1;
    }

    /* Query the current format (informational only). */
    memset(&fmt2, 0, sizeof(fmt2));
    fmt2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd, VIDIOC_G_FMT, &fmt2);
    printf("VIDIOC_G_FMT ret=%d \n", ret);

    /* Request gWidth x gHeight NV21 capture. */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = gWidth;
    fmt.fmt.pix.height = gHeight;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV21;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
    {
        printf("Error:VIDIOC_S_FMT\n");
        return -1;
    }

    req.count = REQ_COUNT;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0)
    {
        printf("Error:VIDIOC_REQBUFS\n");
        return -1;
    }

    buffers = calloc(req.count, sizeof(*buffers));
    if (buffers == NULL)
    {
        printf("Error: calloc buffers failed\n");
        return -1;
    }

    /* Map and queue every driver buffer. */
    for (unsigned int i = 0; i < req.count; i++)
    {
        buf[i].type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf[i].memory = V4L2_MEMORY_MMAP;
        buf[i].index = i;
        if (ioctl(fd, VIDIOC_QUERYBUF, buf + i) < 0)
        {
            printf("Error:VIDIOC_QUERYBUF\n");
            return -1;
        }
        buffers[i].length = buf[i].length;
        buffers[i].start = mmap(NULL, buf[i].length, PROT_READ | PROT_WRITE,
                                MAP_SHARED, fd, buf[i].m.offset);
        if (buffers[i].start == MAP_FAILED)
        {
            printf("Error: mmap failed\n");
            return -1;
        }

        buf[i].type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf[i].memory = V4L2_MEMORY_MMAP;
        buf[i].index = i;
        if (ioctl(fd, VIDIOC_QBUF, buf + i) < 0)
        {
            printf("Error: VIDIOC_QBUF\n");
            return -1;
        }

        /* Y plane at the start; chroma follows the 16-byte-aligned luma.
         * NOTE(review): the driver's plane layout (aligned vs packed)
         * should be confirmed against the board's V4L2 driver. */
        buffers[i].addrVirY = buffers[i].start;
        buffers[i].addrVirC = buffers[i].addrVirY + ALIGN_16B(gWidth) * ALIGN_16B(gHeight);
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
    {
        printf("Error: VIDIOC_STREAMON\n");
        return -1;
    }

    FILE *fpYUV = NULL;
    FILE *fpH264 = NULL;
    char yuv_path[64];
    char h264_path[64];

    /* Capture req.count frames; dump each as raw YUV and as H.264. */
    for (unsigned int i = 0; i < req.count; i++)
    {
        struct v4l2_buffer frame; /* dequeue one filled frame */
        memset(&frame, 0, sizeof(frame));
        frame.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        frame.memory = V4L2_MEMORY_MMAP;
        frame.index = i;
        ioctl(fd, VIDIOC_DQBUF, &frame);

        snprintf(yuv_path, sizeof(yuv_path), "/mnt/extsd/src%04d.yuv", frame.index);
        snprintf(h264_path, sizeof(h264_path), "/mnt/extsd/dst%04d.h264", frame.index);

        fpYUV = fopen(yuv_path, "w");
        if (fpYUV != NULL)
        {
            fwrite(buffers[frame.index].addrVirY, 1, gWidth * gHeight, fpYUV);
            fwrite(buffers[frame.index].addrVirC, 1, gWidth * gHeight / 2, fpYUV);
            fclose(fpYUV);
            fpYUV = NULL;
        }

        fpH264 = fopen(h264_path, "w");
        if (fpH264 != NULL)
        {
            H264EncodeOneFrame((unsigned char *)buffers[frame.index].addrVirY,
                               (unsigned char *)buffers[frame.index].addrVirC,
                               fpH264);
            fclose(fpH264);
            fpH264 = NULL;
        }

        /* Re-queue the buffer so the driver can refill it. */
        ioctl(fd, VIDIOC_QBUF, &frame);
    }

    if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
    {
        printf("Error:VIDIOC_STREAMOFF\n");
        return -1;
    }

    for (unsigned int i = 0; i < req.count; i++)
    {
        munmap(buffers[i].start, buf[i].length);
    }
    free(buffers);
    close(fd);
    return 0;
}

Zeta--S3 Linux抓取一帧YUV图像后使用硬件编码器编码成H.264的更多相关文章

  1. linux抓取top命令中数据的方法

    top在linux中是一个非常直观的命令,可以清晰地看到各进程对资源的使用情况.   但是如果你想从top命令展示中提取某些数据出来,如果想当然地使用这句命令: top|grep xxx 就会被卡住, ...

  2. 使用JavaCV/OpenCV抓取并存储摄像头图像

    http://blog.csdn.net/ljsspace/article/details/6702178  分类: 图形图像(3)  版权声明:本文为博主原创文章,未经博主允许不得转载. 本程序通过 ...

  3. 【Azure 环境】在Windows环境中抓取网络包(netsh trace)后,如何转换为Wireshark格式以便进行分析

    问题描述 如何在Windows环境中,不安装第三方软件的情况下(使用Windows内置指令),如何抓取网络包呢?并且如何转换为Wireshark 格式呢? 操作步骤 1) 以管理员模式打开CMD,使用 ...

  4. linux 抓取访问量排行

    需求: 分析图片服务日志,把日志(每个图片访问次数*图片大小的总和)排行,取top10,也就是计算每个url的总访问大小 语句: awk '{a[$1]+=$10;}END{for(i in a){p ...

  5. Linux 抓取网站命令

    wget -m -e robots=off -U "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.1.6) Gecko/200 ...

  6. linux抓取usb包设置usbmon

  7. 用libvlc 抓取解码后的帧数据

    vlc是一套优秀的开源媒体库,其特点是提供了完整的流媒体框架, 用它可以非常方便的实现抓取解码帧的功能. 与此功能有关的关键API为 libvlc_video_set_callbacks /*设置回调 ...

  8. linux抓包工具tcpdump基本使用

    tcpdump 是一款灵活.功能强大的抓包工具,能有效地帮助排查网络故障问题. tcpdump 是一个命令行实用工具,允许你抓取和分析经过系统的流量数据包.它通常被用作于网络故障分析工具以及安全工具. ...

  9. Linux抓包工具:tcpdump

    tcpdump 是一个命令行实用工具,允许你抓取和分析经过系统的流量数据包.它通常被用作于网络故障分析工具以及安全工具. tcpdump 是一款强大的工具,支持多种选项和过滤规则,适用场景十分广泛.由 ...

随机推荐

  1. [翻译] ATTutorialController

    ATTutorialController https://github.com/AfonsoTsukamoto/ATTutorialController A simple to use tutoria ...

  2. Java学习---Quartz定时任务快速入门

    Quartz是OpenSymphony开源组织在Job scheduling领域又一个开源项目,它可以与J2EE与J2SE应用程序相结合也可以单独使用.Quartz可以用来创建简单或为运行十个,百个, ...

  3. TCP协议那些事

    tcp三次握手                                     tcp四次挥手   tcp十种状态 tcp的2MSL问题 说明 2MSL即两倍的MSL,TCP的TIME_WAI ...

  4. ASP.NET Core Startup类 Configure()方法 | ASP.NET Core 中间件详细说明

    ASP.NET Core 程序启动过程如下 目录 Startup 类 Configure() 方法 中间件 使用中间件 Configure 方法 的参数 IApplicationBuilder Ext ...

  5. web安全职位面试题目汇总

    Domain 解释一下同源策略 同源策略,那些东西是同源可以获取到的 如果子域名和顶级域名不同源,在哪里可以设置叫他们同源 如何设置可以跨域请求数据?jsonp是做什么的? Ajax Ajax是否遵循 ...

  6. 四种Timer的区别和用法

    1.System.Threading.Timer 线程计时器 1.最底层.轻量级的计时器.基于线程池实现的,工作在辅助线程. 2.它并不是内在线程安全的,并且使用起来比其他计时器更麻烦.此计时器通常不 ...

  7. Java日期格式化参数对照表

    Symbol Meaning Presentation Example G era designator Text AD y year Number 2009 M month in year Text ...

  8. solrCloud相关的管理命令

    创建新集群(创建一个索引库) http://192.168.72.141:8080/solr/admin/collections?action=CREATE&name=collection2& ...

  9. AngularJs 与服务器通信 $http, $q, $resource

    $http服务是AngularJS系统自带的,可以用来进行网络通信.获取远程服务器的数据.要记住的是,$http是对浏览器XMLHttpRequest的封装,也就是说,它其实是Ajax. $http( ...

  10. 在CentOS7上安装和使用ZooKeeper最新版本(V3.4.12)

    0.ZooKeeper文档 http://zookeeper.apache.org/doc/r3.4.11/zookeeperOver.html 1.准备 在CentOS7上安装zookeeper时, ...