Straight to the code. The demo below grabs frames from a DirectShow camera via CCameraDS, encodes them with x264, and publishes the resulting H.264 stream to an RTMP server through librtmp (tested against Red5 and Wowza).

// demo.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"
#include <stdio.h>
#include <winsock2.h>   // WSAStartup / htons
#include <windows.h>    // Sleep, GetTickCount, CoInitialize
extern "C"
{
#include "../../common/common.h"
#include "../../common/cpu.h""
#include "../../x264.h"
#include "../../encoder/set.h"
}
#include "librtmp/rtmp_sys.h"
#include "librtmp/log.h"
#include "librtmp/amf.h"
#include "CameraDS.h"
void ConvertYCbCr2BGR(unsigned char *pYUV,unsigned char *pBGR,int iWidth,int iHeight);
void ConvertRGB2YUV(int w,int h,unsigned char *bmp,unsigned char *yuv);
int InitSockets()
{
#ifdef WIN32
    WORD version;
    WSADATA wsaData;
    version = MAKEWORD(1, 1);
    return (WSAStartup(version, &wsaData) == 0);
#else
    return TRUE;
#endif
}
inline void CleanupSockets()
{
#ifdef WIN32
    WSACleanup();
#endif
}
#define HEX2BIN(a)      (((a)&0x40)?((a)&0xf)+9:((a)&0xf))
int hex2bin(char *str, char **hex)
{
    char *ptr;
    int i, l = strlen(str);
    if (l & 1)
        return 0;
    *hex = (char *)malloc(l/2);
    ptr = *hex;
    if (!ptr)
        return 0;
    for (i=0; i < l; i += 2)   // consume two hex digits per output byte
        *ptr++ = (HEX2BIN(str[i]) << 4) | HEX2BIN(str[i+1]);
    return l/2;
}
char * put_byte( char *output, uint8_t nVal )
{
    output[0] = nVal;
    return output+1;
}
char * put_be16(char *output, uint16_t nVal )
{
    output[1] = nVal & 0xff;
    output[0] = nVal >> 8;
    return output+2;
}
char * put_be24(char *output,uint32_t nVal )
{
    output[2] = nVal & 0xff;
    output[1] = nVal >> 8;
    output[0] = nVal >> 16;
    return output+3;
}
char * put_be32(char *output, uint32_t nVal )
{
    output[3] = nVal & 0xff;
    output[2] = nVal >> 8;
    output[1] = nVal >> 16;
    output[0] = nVal >> 24;
    return output+4;
}
char *  put_be64( char *output, uint64_t nVal )
{
    output=put_be32( output, nVal >> 32 );
    output=put_be32( output, nVal );
    return output;
}
char * put_amf_string( char *c, const char *str )
{
    uint16_t len = strlen( str );
    c=put_be16( c, len );
    memcpy(c,str,len);
    return c+len;
}
char * put_amf_double( char *c, double d )
{
    *c++ = AMF_NUMBER;    
    {
        unsigned char *ci, *co;
        ci = (unsigned char *)&d;
        co = (unsigned char *)c;
        co[0] = ci[7];
        co[1] = ci[6];
        co[2] = ci[5];
        co[3] = ci[4];
        co[4] = ci[3];
        co[5] = ci[2];
        co[6] = ci[1];
        co[7] = ci[0];
    }
    return c+8;
}
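// The put_* helpers above write big-endian integers and AMF0-encoded strings and
// numbers straight into a raw buffer; they are used below to hand-craft the
// onMetaData script packet and the NALU length fields.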
int main(int argc, char * argv[])
{
    if (argc<2)
    {
        RTMP_LogPrintf("RTMP_URL IS NULL!!!/n");
        //return -1;
    }
    if (!InitSockets())
    {
        RTMP_LogPrintf("InitSockets Error!/n");
        return -1;
    }
    RTMP_LogPrintf("InitSockets!/n");
    CoInitialize(NULL);
    CCameraDS camera;
    if (!camera.OpenCamera(0,320,240,false))
    {
        RTMP_LogPrintf("Open Camera Error/n");
        return -1;
    }
    int nHeight=camera.GetHeight();
    int nWidth=camera.GetWidth();
    unsigned char * szRGBBuffer=new unsigned char[nHeight*nWidth * 3];
    RTMP_LogPrintf("Camera Open Scuess!  Picture Size[-x%d]/n",nWidth,nHeight);
    RTMP_debuglevel = RTMP_LOGINFO;
    RTMP *r;
    //char uri[]="rtmp://127.0.0.1/live/test";
    //char uri[]="rtmp://192.199.15.223/live/test";
    //char uri[]="rtmp://221.9.244.4/live/jltv";
    //char uri[]="rtmp://192.199.15.223/oflaDemo/red5StreamDemo";
    //char uri[]="rtmp://192.199.15.151/live/test";
    char uri[]="rtmp://127.0.0.1/live/zzj";
    r= RTMP_Alloc();
    RTMP_Init(r);
    RTMP_SetupURL(r, (char*)uri);
    RTMP_EnableWrite(r);
    RTMP_Connect(r, NULL);
    RTMP_ConnectStream(r,0);
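    // Session setup: allocate and initialize the RTMP handle, set the publish URL,
    // switch librtmp into publish (write) mode, then connect and create the stream.
    // The return values are not checked here; a robust publisher should verify each
    // of these calls, all of which return FALSE on failure.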
    unsigned char szNalBuffer[1024*32];
    unsigned char szBodyBuffer[1024*32];
    x264_nal_t    *p264Nal;
    int            i264Nal;
    x264_param_t * p264Param;
    x264_picture_t * p264Pic;
    x264_t *p264Handle;    
    p264Param = new x264_param_t();
    p264Pic     = new x264_picture_t();
    memset(p264Pic,0,sizeof(x264_picture_t));
    x264_param_default(p264Param);    //set default param
    p264Param->i_threads=2;
    p264Param->i_width    = nWidth;    //set frame width
    p264Param->i_height    = nHeight;    //set frame height
    
    p264Param->b_cabac =0;
    p264Param->i_bframe =0;
    p264Param->b_interlaced=0;
    p264Param->rc.i_rc_method=X264_RC_ABR;//X264_RC_CQP
    p264Param->i_level_idc=21;
    p264Param->rc.i_bitrate=200;
    p264Param->i_fps_num=30;
    p264Param->i_keyint_max=p264Param->i_fps_num*3;
    if((p264Handle = x264_encoder_open(p264Param)) == NULL)
    {
        fprintf( stderr, "x264_encoder_open failed/n" );
        return -2;
    }
    bs_t bs={0};
    x264_picture_alloc(p264Pic, X264_CSP_YV12, p264Param->i_width, p264Param->i_height);
    p264Pic->i_type = X264_TYPE_AUTO;
    x264_picture_t pic_out;
    RTMPPacket packet={0};
    memset(&packet,0,sizeof(RTMPPacket));
    packet.m_nChannel = 0x04;
    packet.m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet.m_nTimeStamp = 0;
    packet.m_nInfoField2 = r->m_stream_id;
    packet.m_hasAbsTimestamp = 0;
    packet.m_body =(char *) szBodyBuffer;
    char * szTmp=(char *)szBodyBuffer;
    packet.m_packetType = RTMP_PACKET_TYPE_INFO;
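    // Build an AMF0 script-data message: "@setDataFrame" followed by "onMetaData"
    // and an object describing the stream. Publishers send metadata this way so
    // the server can forward an onMetaData event to connecting players.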
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string(szTmp, "@setDataFrame" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string(szTmp, "onMetaData" );
    szTmp=put_byte(szTmp, AMF_OBJECT );
    szTmp=put_amf_string( szTmp, "author" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string( szTmp, "" );
    szTmp=put_amf_string( szTmp, "copyright" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string( szTmp, "" );
    szTmp=put_amf_string( szTmp, "description" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string( szTmp, "" );
    szTmp=put_amf_string( szTmp, "keywords" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string( szTmp, "" );
    szTmp=put_amf_string( szTmp, "rating" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string( szTmp, "" );
    szTmp=put_amf_string( szTmp, "presetname" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string( szTmp, "Custom" );
    szTmp=put_amf_string( szTmp, "width" );
    szTmp=put_amf_double( szTmp, p264Param->i_width );
    szTmp=put_amf_string( szTmp, "width" );
    szTmp=put_amf_double( szTmp, p264Param->i_width );
    szTmp=put_amf_string( szTmp, "height" );
    szTmp=put_amf_double( szTmp, p264Param->i_height );
    szTmp=put_amf_string( szTmp, "framerate" );
    szTmp=put_amf_double( szTmp, (double)p264Param->i_fps_num / p264Param->i_fps_den );
    szTmp=put_amf_string( szTmp, "videocodecid" );
    szTmp=put_byte(szTmp, AMF_STRING );
    szTmp=put_amf_string( szTmp, "avc1" );
    szTmp=put_amf_string( szTmp, "videodatarate" );
    szTmp=put_amf_double( szTmp, p264Param->rc.i_bitrate );
    szTmp=put_amf_string( szTmp, "avclevel" );
    szTmp=put_amf_double( szTmp, p264Param->i_level_idc );
    szTmp=put_amf_string( szTmp, "avcprofile" );
    szTmp=put_amf_double( szTmp, 0x42 );
    szTmp=put_amf_string( szTmp, "videokeyframe_frequency" );
    szTmp=put_amf_double( szTmp, 3 );
    szTmp=put_amf_string( szTmp, "" );
    szTmp=put_byte( szTmp, AMF_OBJECT_END );
    packet.m_nBodySize=szTmp-(char *)szBodyBuffer;
    RTMP_SendPacket(r,&packet,1);
    packet.m_packetType = RTMP_PACKET_TYPE_VIDEO;    
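    // First video packet: the AVC sequence header.
    //   byte 0     0x17 -> keyframe (1) + AVC codec id (7)
    //   byte 1     0x00 -> AVCPacketType = sequence header
    //   bytes 2-4  composition time (0)
    //   byte 5     0x01 -> configurationVersion
    //   bytes 6-8  profile 0x42 (Baseline), profile_compatibility 0xC0, level 0x15 (2.1)
    //   byte 9     lengthSizeMinusOne (0x03; strictly the six reserved bits should also be set, i.e. 0xFF)
    //   byte 10    numOfSequenceParameterSets (0x01; strictly 0xE1 with the reserved bits set)
    // The SPS and PPS are appended below, each prefixed with a 16-bit length.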
    szBodyBuffer[ 0]=0x17;
    szBodyBuffer[ 1]=0x00;
    szBodyBuffer[ 2]=0x00;
    szBodyBuffer[ 3]=0x00;
    szBodyBuffer[ 4]=0x00;
    szBodyBuffer[ 5]=0x01;
    szBodyBuffer[ 6]=0x42;
    szBodyBuffer[ 7]=0xC0;
    szBodyBuffer[ 8]=0x15;
    szBodyBuffer[ 9]=0x03;
    szBodyBuffer[10]=0x01;
    szTmp=(char *)szBodyBuffer+11;
    short slen=0;
    bs_init(&bs,szNalBuffer,16);//初始话bs
    x264_sps_write(&bs, p264Handle->sps);//读取编码器的SPS
    slen=bs.p-bs.p_start+1;//spslen(short)
    slen=htons(slen);
    memcpy(szTmp,&slen,sizeof(short));
    szTmp+=sizeof(short);
    *szTmp=0x67;
    szTmp+=1;
    memcpy(szTmp,bs.p_start,bs.p-bs.p_start);
    szTmp+=bs.p-bs.p_start;
    *szTmp=0x01;
    szTmp+=1;
    bs_init(&bs,szNalBuffer,16);//初始话bs
    x264_pps_write(&bs, p264Handle->pps);//读取编码器的PPS
    slen=bs.p-bs.p_start+1;//spslen(short)
    slen=htons(slen);
    memcpy(szTmp,&slen,sizeof(short));
    szTmp+=sizeof(short);
    *szTmp=0x68;
    szTmp+=1;
    memcpy(szTmp,bs.p_start,bs.p-bs.p_start);
    szTmp+=bs.p-bs.p_start;
    packet.m_nBodySize=szTmp-(char *)szBodyBuffer;
    RTMP_SendPacket(r,&packet,0);
    unsigned int nTimes=0;
    unsigned int oldTick=GetTickCount();
    unsigned int newTick=0;
    packet.m_nTimeStamp=0;
    
    while(true)
    {
        szBodyBuffer[ 0]=0x17;
        szBodyBuffer[ 1]=0x01;
        szBodyBuffer[ 2]=0x00;
        szBodyBuffer[ 3]=0x00;
        szBodyBuffer[ 4]=0x42;
        unsigned char * szTmp=szBodyBuffer+5;
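        // Per-frame packet body: byte 0 = frame type + codec id (0x17 keyframe,
        // 0x27 inter frame, patched after encoding), byte 1 = 0x01 (AVC NALU),
        // bytes 2-4 = composition time, bytes 5-8 = big-endian length of the NAL
        // data (written by put_be32 below), bytes 9.. = the NAL units themselves.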
        unsigned  char * pNal=szNalBuffer;
        nTimes++;
        int nFramsInPack=0;
        while(true)
        {
            nFramsInPack++;
            unsigned char * pCameraBuf = camera.QueryFrame();
            if (!pCameraBuf)
            {
                return -1;
            }
            for(int ii=0;ii<nHeight;ii++)   // copy row ii into row (nHeight-ii-1): flip the frame vertically
            {
                memcpy(szRGBBuffer+(nWidth*3)*(nHeight-ii-1),pCameraBuf+(nWidth*3)*ii,nWidth*3);
                //memcpy(pCameraBuf+nWidth*(nHeight-ii-1),pCameraBuf+nWidth*ii,nWidth*3);
                //memcpy(szLineBuffer,pCameraBuf+nWidth*(nHeight-ii-1),nWidth*3);
            }
            ConvertRGB2YUV(nWidth,nHeight,szRGBBuffer,p264Pic->img.plane[0]);
            //memcpy(p264Pic->img.plane[0],szNalBuffer,nWidth*nHeight);
            //memcpy(p264Pic->img.plane[1],szNalBuffer+nWidth*nHeight,nWidth*nHeight/4);
            //memcpy(p264Pic->img.plane[2],szNalBuffer+nWidth*nHeight*5/4,nWidth*nHeight/4);
            
            if( x264_encoder_encode( p264Handle, &p264Nal, &i264Nal, p264Pic ,&pic_out) < 0 )
            {
                fprintf( stderr, "x264_encoder_encode failed/n" );
            }
            for( int i = 0; i < i264Nal; i++ )
            {
                int i_size;
                int i_data;
                i_data = 1024*32;
                if( ( i_size = x264_nal_encode( pNal, &i_data, 1, &p264Nal[i] ) ) > 0 )
                {
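                    // pNal[4] is the NAL header byte right after the 4-byte Annex-B
                    // start code. Skip non-reference NALs (nal_ref_idc == 0) and skip
                    // SPS (0x67) / PPS (0x68), which were already sent in the sequence
                    // header; for everything else, drop the start code and append the
                    // raw NAL unit to szNalBuffer.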
                    if ((pNal[4]&0x60)==0)
                    {
                        continue;
                    }
                    if (pNal[4]==0x67)
                    {
                        continue;
                    }
                    if (pNal[4]==0x68)
                    {
                        continue;
                    }
                    memmove(pNal,pNal+4,i_size-4);
                    pNal+=i_size-4;
                }
                else if( i_size < 0 )
                {
                    fprintf( stderr,"need to increase buffer size (size=%d)/n", -i_size );
                }
            }
            unsigned int nSize=pNal-szNalBuffer;
            packet.m_nBodySize=nSize+9;
            if (i264Nal>1)
            {
                szBodyBuffer[ 0]=0x17;
            }
            else
            {
                szBodyBuffer[ 0]=0x27;
            }
            put_be32((char *)szBodyBuffer+5,nSize);
            memcpy(szBodyBuffer+9,szNalBuffer,pNal-szNalBuffer);
            RTMP_SendPacket(r,&packet,0);
            Sleep(20);
            newTick=GetTickCount();
            //RTMP_LogStatus("/rInfo NAUL Type:0xx size: ] Tick:d d",szNalBuffer[0], nSize,33-nSleep,GetTickCount()-oldTick+nSleep);
            packet.m_headerType = RTMP_PACKET_SIZE_MEDIUM;
            packet.m_nTimeStamp+=newTick-oldTick;
            oldTick=newTick;
            break;
        }
    }
    return 0;
}
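
The loop above never exits, so nothing is released. For completeness, here is a minimal teardown sketch that would apply if the capture loop were given an exit condition; it assumes CCameraDS exposes a CloseCamera() method (the remaining calls are the standard x264, librtmp, and Win32 cleanup functions).

    // Hypothetical cleanup path (not part of the original demo):
    x264_picture_clean(p264Pic);     // release the planes allocated by x264_picture_alloc
    x264_encoder_close(p264Handle);  // shut down the encoder
    delete p264Pic;
    delete p264Param;
    delete [] szRGBBuffer;
    camera.CloseCamera();            // assumption: CCameraDS provides CloseCamera()
    RTMP_Close(r);                   // close the RTMP connection
    RTMP_Free(r);                    // free the session object
    CleanupSockets();
    CoUninitialize();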
