arm6410: ffmpeg+x264 software encoding + hardware encoding, sent with jrtplib

I've been building things since my junior year, but I've never been good at writing them up, so I kept re-learning the same material. After reading a few great blogs I realized that writing really does reinforce learning, so here is a write-up of a recent project done with a classmate; it is essentially his graduation project. Having looked at what others have done before, I think it's worth taking further, for example adding automated monitoring or image recognition, but that's for later. My skills are limited, so please point out any mistakes. Time is also limited, so some of the code lacks detailed comments or explanations; I'll fill those in gradually.


Here is the full source of the project; a single make builds it.

The prerequisite, of course, is having the FFmpeg, x264 and jrtplib libraries.

There is plenty of material online about building these three, so I won't repeat it here.

http://download.csdn.net/detail/wisha1989/5101634

(1) Image capture:

Capture uses the v4l2 interface that embedded Linux ships with. Nothing difficult here; the code is adapted from snippets found online and wrapped into two interface functions:

#ifndef V4L2CAPTURE_H
#define V4L2CAPTURE_H

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <linux/videodev2.h>

extern "C"
{
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libavdevice/avdevice.h"
    #include "libavutil/avutil.h"
    #include "libavfilter/avfilter.h"
}

#define CLEAR(x) memset(&(x), 0, sizeof(x))

struct buffer
{
    void   *start;
    size_t  length;
};

int v4l2capture_init(int width, int height, const char *dev); // capture init: width, height, device path
int v4l2capture(AVPacket *avpkt);                             // grab one frame into FFmpeg's AVPacket

#endif // V4L2CAPTURE_H

That's the capture module's header (not written very cleanly, admittedly; should the prototypes be wrapped in extern "C" as well?). The implementation of the two functions follows. There is honestly nothing novel about it; code like this is everywhere.
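To answer that question: if this header were ever included from plain C code, the prototypes would indeed need the usual guard. A minimal sketch:

// Hedged sketch: the usual C/C++ interop guard for the two capture prototypes.
#ifdef __cplusplus
extern "C" {
#endif

int v4l2capture_init(int width, int height, const char *dev);
int v4l2capture(AVPacket *avpkt);

#ifdef __cplusplus
}
#endif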

One wrinkle: my camera outputs packed YUV422 (YUYV), but the H.264 encoders used here only accept planar YUV420 (I420), so the frames have to be converted first (more on this below).
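As a standalone illustration of that conversion (the project wires the same idea into YUV422_to_I420 further down), a minimal libswscale sketch might look like this; the buffer parameters are assumptions:

// Hedged sketch: packed YUYV422 -> planar YUV420P with libswscale, using the
// old PIX_FMT_* names this project already uses.
// yuyv_frame / y_plane / u_plane / v_plane are assumed, caller-owned buffers.
static void yuyv_to_i420_sketch(uint8_t *yuyv_frame,
                                uint8_t *y_plane, uint8_t *u_plane, uint8_t *v_plane)
{
    struct SwsContext *ctx = sws_getContext(320, 240, PIX_FMT_YUYV422,
                                            320, 240, PIX_FMT_YUV420P,
                                            SWS_FAST_BILINEAR, NULL, NULL, NULL);

    uint8_t *src[4]        = { yuyv_frame, NULL, NULL, NULL }; // one packed plane
    int      src_stride[4] = { 320 * 2, 0, 0, 0 };             // 2 bytes per pixel
    uint8_t *dst[4]        = { y_plane, u_plane, v_plane, NULL };
    int      dst_stride[4] = { 320, 160, 160, 0 };

    sws_scale(ctx, src, src_stride, 0, 240, dst, dst_stride);
    sws_freeContext(ctx);
}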

static int              fd        = -1;
struct buffer          *buffers   = NULL;
static unsigned int     n_buffers = 0;
static unsigned long    file_length;
static unsigned char   *file_name;

void v4l2Capture_release();

static int read_frame(AVPacket *avpkt)
{
    struct v4l2_buffer buf;

    CLEAR(buf);
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    int ff = ioctl(fd, VIDIOC_DQBUF, &buf); // dequeue a filled buffer
    if (ff < 0)
        fprintf(stderr, "failure = %d\n", ff);
    assert(buf.index < n_buffers);

    fprintf(stderr, "capture frame size: %d\n\n", (int)buffers[buf.index].length);

    // copy the frame out so the driver buffer can be requeued immediately;
    // the caller owns (and must free) avpkt->data
    uint8_t *tbuf = (uint8_t *)malloc(buffers[buf.index].length);
    memcpy(tbuf, buffers[buf.index].start, buffers[buf.index].length);
    avpkt->data = tbuf;
    avpkt->size = buffers[buf.index].length;

    ff = ioctl(fd, VIDIOC_QBUF, &buf); // requeue the buffer
    if (ff < 0)
        printf("failure VIDIOC_QBUF\n");

    return 1;
}




int v4l2capture(AVPacket *avpkt)
{
    read_frame(avpkt);
    return 0;
}


int v4l2capture_init(int width, int height, const char *dev)
{
    struct v4l2_capability cap;
    struct v4l2_format fmt;
    unsigned int i;

    /* ---- open the video device ---- */
    fd = open(dev, O_RDWR); // open the device in blocking mode
    if (fd == -1)
        fprintf(stderr, "open dev failed!\n");
    else
        fprintf(stderr, "open dev success! dev:%s\n\n", dev);

    int ff = ioctl(fd, VIDIOC_QUERYCAP, &cap); // query device capabilities
    if (ff < 0)
        fprintf(stderr, "failure VIDIOC_QUERYCAP\n");

    /* ---- enumerate the pixel formats the camera supports ---- */
    struct v4l2_fmtdesc fmt1;
    int ret;
    memset(&fmt1, 0, sizeof(fmt1));
    fmt1.index = 0;
    fmt1.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    while ((ret = ioctl(fd, VIDIOC_ENUM_FMT, &fmt1)) == 0)
    {
        fmt1.index++;
        fprintf(stderr, "{ pixelformat = '%c%c%c%c', description = '%s' }\n",
                fmt1.pixelformat & 0xFF, (fmt1.pixelformat >> 8) & 0xFF,
                (fmt1.pixelformat >> 16) & 0xFF, (fmt1.pixelformat >> 24) & 0xFF,
                fmt1.description);
    }

    /* ---- set the capture format ---- */
    CLEAR(fmt);
    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = width;
    fmt.fmt.pix.height      = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // packed YUV422, see the conversion note above
    fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;

    ff = ioctl(fd, VIDIOC_S_FMT, &fmt); // set image format
    if (ff < 0)
        fprintf(stderr, "failure VIDIOC_S_FMT\n");

    file_length = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; // bytes per frame

    /* ---- request and mmap the driver buffers ---- */
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count  = 5;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    ff = ioctl(fd, VIDIOC_REQBUFS, &req); // request req.count buffers
    if (ff < 0)
        fprintf(stderr, "failure VIDIOC_REQBUFS\n");
    if (req.count < 1)
        fprintf(stderr, "Insufficient buffer memory\n");

    buffers = (buffer *)calloc(req.count, sizeof(*buffers)); // user-space bookkeeping

    for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        struct v4l2_buffer buf; // one driver frame
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = n_buffers;

        if (-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf)) // query buffer for its mmap offset
            printf("VIDIOC_QUERYBUF error\n");

        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start  =
            mmap(NULL /* start anywhere */,  // map the driver buffer into user space
                 buf.length,
                 PROT_READ | PROT_WRITE /* required */,
                 MAP_SHARED /* recommended */,
                 fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start)
            fprintf(stderr, "mmap failed\n");
    }

    for (i = 0; i < n_buffers; ++i)
    {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = i;

        if (-1 == ioctl(fd, VIDIOC_QBUF, &buf)) // queue every mapped buffer
            fprintf(stderr, "VIDIOC_QBUF failed\n");
    }

    enum v4l2_buf_type v4l2type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int fret = ioctl(fd, VIDIOC_STREAMON, &v4l2type); // start streaming
    return fret;
}



That's it for the capture part.
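Before moving on, here is a hedged sketch of how the two interface functions are meant to be driven on their own (the real loop, with threading, follows in part (2)):

// Hedged usage sketch: driving the two capture functions standalone.
#include "v4l2capture.h"

static void capture_smoke_test()
{
    v4l2capture_init(320, 240, "/dev/video2");   // device path as used later in this project

    for (int n = 0; n < 100; n++)
    {
        AVPacket pkt;
        v4l2capture(&pkt);   // fills pkt.data with one packed YUYV frame
        // ... convert / encode / send pkt.data here ...
        free(pkt.data);      // read_frame() malloc'd it, so the caller frees it
    }
}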


(2) Encoding (the software path)

Both software and hardware encoding are used. The software path encodes with FFmpeg on top of x264. I had only ever used FFmpeg and wasn't familiar with x264 itself, so I stuck with FFmpeg for encoding, but that brought its own problems. The most obvious one: changing a parameter in FFmpeg's AVCodecContext doesn't necessarily change anything, because when FFmpeg hands parameters to x264 it validates them first, and any parameter that fails validation silently falls back to the default. That's a real headache. If you truly want to understand H.264, I recommend digging into x264's parameters directly. I just wanted the project done quickly, so I modified some of x264's source code to force through the parameter changes I needed (a bit brutal, I admit).
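If you'd rather not patch x264's source, the cleaner alternative is to bypass FFmpeg's wrapper and drive x264's own API, where nothing gets silently overridden. A rough sketch, not what this project does, with placeholder buffers:

#include <stdio.h>
#include <x264.h>

// Hedged sketch: encode one I420 frame by setting parameters on x264_param_t
// directly instead of filtering them through AVCodecContext.
static void encode_one_frame_with_x264(void)
{
    x264_param_t param;
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    param.i_width      = 320;   // same frame size as this project
    param.i_height     = 240;
    param.i_fps_num    = 10;
    param.i_fps_den    = 1;
    param.i_keyint_max = 10;    // GOP length, matching gop_size below

    x264_t *enc = x264_encoder_open(&param);

    x264_picture_t pic_in, pic_out;
    x264_picture_alloc(&pic_in, X264_CSP_I420, 320, 240);
    // ... fill pic_in.img.plane[0..2] with one converted I420 frame ...

    x264_nal_t *nals;
    int n_nals;
    int size = x264_encoder_encode(enc, &nals, &n_nals, &pic_in, &pic_out);
    fprintf(stderr, "encoded %d bytes in %d NALs\n", size, n_nals);
    // nals[i].p_payload / i_payload are annex-B NAL units, ready for RTP

    x264_picture_clean(&pic_in);
    x264_encoder_close(enc);
}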

Because the code structure isn't great, the software-encoding part is a little messy. It breaks down into the following functions:

#ifndef CAPTUREANDCOMPRESS_H
#define CAPTUREANDCOMPRESS_H

extern "C"
{
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libavdevice/avdevice.h"
    #include "libavutil/avutil.h"
    #include "libavfilter/avfilter.h"
}

void codec_init();

void *capture_video(void *);   // capture thread: producer
void captureAndCompress();     // sets everything up and starts both threads
void *compress_video(void *);  // encode thread: consumer

#endif // CAPTUREANDCOMPRESS_H


Here is the implementation of those functions:


#include"captureAndcompress.h"
#include"sys/stat.h"
#include"rtpsend.h"
#include"v4l2capture.h"
#include"mfcapi.h"
//#include <sys/wait.h>
#include"time.h"

extern "C"
{
    #include"pthread.h"
    #include"semaphore.h"
    #include"unistd.h"
}
#define PICWIDTH   320
#define PICHEIGHT  240
#define buf_size 5

clock_t switch_time_t1, switch_time_t2;




AVCodec *codec;
AVCodecContext *c= NULL;

AVFrame *picture;
uint8_t *outbuf, *picture_buf;
uint32_t outbuf_size = 0;
FILE *f = NULL;

FILE *fdebug = NULL;

sem_t emptyFrameSpace;
sem_t fullFrameSpace;
pthread_mutex_t mutex;
struct SwsContext   *color_convert_ctx ;



static void 	*enc_handle;
static int 	 enc_frame_cnt = 0;


unsigned char** yuv422BUf;


void thread_debug(char* content,int d)
{
    if(!fdebug)
    fdebug = fopen("debugdata.txt","wb");

    fprintf(fdebug,"%s%d\n",content,d);

}


static void yuv422BUf_init()
{
    // ring of buf_size raw frames; *3 over-allocates a little
    // (packed YUYV only needs width*height*2 bytes)
    yuv422BUf = (unsigned char **)malloc(buf_size * sizeof(unsigned char *));
    int i;
    for (i = 0; i < buf_size; i++)
    {
        yuv422BUf[i] = (unsigned char *)malloc(PICWIDTH * PICHEIGHT * 3 * sizeof(unsigned char));
    }
}
static void YUV422_to_I420(unsigned char *yuv422, unsigned char *I420[], unsigned int width, unsigned int height)
{
    unsigned char *data[2];
    int ylinesize[4];
    int ilinesize[4];
    clock_t t1, t2;
    t1 = clock();

    ylinesize[0] = width * 2;   // packed YUYV: 2 bytes per pixel; only [0] matters for the source
    ylinesize[1] = width / 2;
    ylinesize[2] = height / 2;

    ilinesize[0] = width;       // planar I420 strides: Y, then half-width U and V
    ilinesize[1] = width / 2;
    ilinesize[2] = width / 2;
    data[0] = yuv422;

    if (color_convert_ctx)
    {
        sws_scale(color_convert_ctx, data, ylinesize, 0, height, I420, ilinesize);
    }
    else
    {
        fprintf(stderr, "yuv422 to yuv420 failed!\n\n");
    }
    t2 = clock();
    fprintf(stderr, "yuv422 to yuv420 time cost: %ld ms\n\n", (long)((t2 - t1) / (CLOCKS_PER_SEC / 1000)));
}




void codec_init()
{
    rtp_params_init();
    avcodec_init();        // old FFmpeg API; removed in later versions
    av_register_all();

    /* ---------- ffmpeg / x264 software encoder ---------- */
    codec = avcodec_find_encoder(CODEC_ID_H264);
    c = avcodec_alloc_context();
    picture = avcodec_alloc_frame();

    int br = 2 * 1000;     // very low target bitrate; x264 may override it (see the note above)
    c->rc_lookahead = 0;   // cannot set the true param through ffmpeg (see the note above)
    c->bit_rate = br;
    c->rc_min_rate = br;
    c->rc_max_rate = br;
    c->bit_rate_tolerance = br;
    // c->rc_buffer_size = br;
    c->rc_initial_buffer_occupancy = c->rc_buffer_size * 3 / 4;
    c->rc_buffer_aggressivity = (float)1.0;
    c->rc_initial_cplx = 0.5;
    c->codec_type = AVMEDIA_TYPE_VIDEO;

    c->dct_algo = 0;
    c->me_pre_cmp = 2;
    c->cqp = 30;
    c->me_method = 7;
    c->qmin = 3;
    c->qmax = 31;
    c->max_qdiff = 3;
    c->qcompress = 0.3;    // important param
    c->qblur = 0.3;
    c->nsse_weight = 8;
    c->i_quant_factor = (float)0.8;
    c->b_quant_factor = 1.25;
    c->b_quant_offset = 1.25;

    // c->bit_rate_tolerance = 9000*1000;
    c->width  = PICWIDTH;
    c->height = PICHEIGHT;

    c->time_base.num = 1;
    c->time_base.den = 10;    // 10 fps
    // c->flags2 = c->flags2 & !CODEC_FLAG2_MBTREE;
    c->gop_size = 10;
    c->max_b_frames = 0;      // no B-frames keeps streaming latency down

    c->pix_fmt = PIX_FMT_YUV420P;

    if (avcodec_open(c, codec) < 0)
    {
        fprintf(stderr, "open encoder failed!\n");
        exit(1);
    }

    /* ---------- hardware encoder, capture, buffers ---------- */
    enc_handle = mfc_encoder_init(PICWIDTH, PICHEIGHT, 25, 1000, 20);

    v4l2capture_init(PICWIDTH, PICHEIGHT, "/dev/video2");

    yuv422BUf_init();

    outbuf_size = 10000000;
    outbuf = (uint8_t *)av_malloc(outbuf_size);

    int size = c->width * c->height;
    picture_buf = (uint8_t *)av_malloc((size * 3) / 2);   // one contiguous I420 frame

    picture->data[0] = picture_buf;                       // Y plane
    picture->data[1] = picture->data[0] + size;           // U plane
    picture->data[2] = picture->data[1] + size / 4;       // V plane
    picture->linesize[0] = c->width;
    picture->linesize[1] = c->width / 2;
    picture->linesize[2] = c->width / 2;

    color_convert_ctx = sws_getContext(PICWIDTH, PICHEIGHT, PIX_FMT_YUYV422,
                                       PICWIDTH, PICHEIGHT, PIX_FMT_YUV420P,
                                       SWS_FAST_BILINEAR, NULL, NULL, NULL);

    sem_init(&emptyFrameSpace, 0, buf_size);  // all ring slots empty at start
    sem_init(&fullFrameSpace, 0, 0);
    pthread_mutex_init(&mutex, NULL);
}


void *capture_video(void *)
{
    int i = 0;

    while (1)
    {
        sem_wait(&emptyFrameSpace);   // wait for a free slot in the ring
        pthread_mutex_lock(&mutex);

        clock_t pre;
        switch_time_t1 = pre = clock();
        fprintf(stderr, "switch time cost:= %ld us\n", labs(switch_time_t1 - switch_time_t2));

        AVPacket avpkt;
        v4l2capture(&avpkt);

        i = i % buf_size;
        memcpy(yuv422BUf[i], avpkt.data, avpkt.size);   // stash the raw frame in the ring
        free(avpkt.data);                               // read_frame() malloc'd it
        avpkt.size = 0;
        i++;

        clock_t aft = clock();
        fprintf(stderr, "capture time cost:%ld\n", (long)(aft - pre) / 1000);

        pthread_mutex_unlock(&mutex);
        sem_post(&fullFrameSpace);    // signal the encoder thread
    }
}




void *compress_video(void *)
{
    int i = 0;
    int out_size;
    clock_t t1, t2, t3;

    while (1)
    {
        sem_wait(&fullFrameSpace);    // wait for a captured frame
        pthread_mutex_lock(&mutex);

        switch_time_t2 = t1 = clock();
        fprintf(stderr, "switch time cost:= %ld us\n", labs(switch_time_t1 - switch_time_t2));
        i = i % buf_size;

        YUV422_to_I420(yuv422BUf[i], picture->data, PICWIDTH, PICHEIGHT); // packed YUV422 -> planar YUV420

        out_size = avcodec_encode_video(c, outbuf, outbuf_size, picture); // ffmpeg encode call

        //fprintf(stderr,"encode frame size: %d\n",out_size);
        picture->pts++;
        t2 = clock();
        fprintf(stderr, "encode time cost: %ld\n\n", (long)(t2 - t1) / 1000);

        nal_rtp_send((unsigned char *)outbuf, out_size); // ship the encoded NALs over RTP

        i++;
        t3 = clock();
        fprintf(stderr, "send time cost:%ld\n", (long)(t3 - t2) / 1000);

        pthread_mutex_unlock(&mutex);
        sem_post(&emptyFrameSpace);   // hand the slot back to the capture thread
    }
}



void captureAndCompress()
{
    pthread_t id1, id2;
    codec_init();
    if (pthread_create(&id1, NULL, capture_video, NULL))
    {
        fprintf(stderr, "create capture thread failed!\n");
    }

    if (pthread_create(&id2, NULL, compress_video, NULL))
    {
        fprintf(stderr, "create compress thread failed!\n");
    }

    pthread_join(id1, NULL);
    pthread_join(id2, NULL);
}


That's the software-encoding part. Note that the encoding thread already contains the RTP send call, nal_rtp_send((unsigned char*)outbuf, out_size), so the code above is effectively the main program of the whole project. The structure is a bit tangled, I know...
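The post never shows an entry point; presumably it is just a main() that calls captureAndCompress(), along these lines:

// A plausible entry point for the sender, assuming the files above
// (the original post never shows main(), so this is an assumption):
int main()
{
    captureAndCompress();   // initializes everything, then joins the two threads
    return 0;
}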


(3) RTP sending

RTP sending uses the open-source jrtplib (3.9.1). It's written in C++ and built with CMake, so cross-compiling it is a bit of a hassle, but it is quite easy to use.


#ifndef RTPSEND_H
#define RTPSEND_H
#include "jrtplib3/rtpsession.h"
#include "jrtplib3/rtpudpv4transmitter.h"
#include "jrtplib3/rtpipv4address.h"
#include "jrtplib3/rtpsessionparams.h"
#include "jrtplib3/rtperrors.h"

#include <time.h>
#include <stdlib.h>
#include <stdio.h>
#include <iostream>
#include <string>

#define H264                96    // dynamic RTP payload type used for H.264
#define MAX_RTP_PKT_LENGTH  1350  // fragment NALs larger than this (stays under a typical MTU)

using namespace jrtplib;

int  rtp_params_init();
void rtp_send(unsigned char *buf, int len);
void nal_rtp_send(unsigned char *buf, int len);

#endif // RTPSEND_H



The function implementations follow:


#include"rtpsend.h"

static RTPSession                   sess;
static RTPUDPv4TransmissionParams   transparams;
static RTPSessionParams             sessparams;
static uint                         portbase,destport;
static uint                         destip;
static std::string                  ipstr;
static int                          status,i,num;
static int                          timeStampInc = 90000 / 20;
static long long                    currentTime;
static int                          sendFrameCount = 0;
static void checkerror(int err);
unsigned char*                      nalBuffer;
unsigned char*                      sendBuffer;

static  int  get_next_nalu(unsigned char* buf,int buf_len,unsigned char* outbuf);
static  bool find_start_code(unsigned char*buf);


int rtp_params_init()
{
    ipstr = "192.168.0.1";
    destport = 9000;
    portbase = 8000;
    num = 0;
    destip = inet_addr(ipstr.c_str());
    if (destip == INADDR_NONE)
    {
        std::cerr << "Bad IP address specified" << std::endl;
        return -1;
    }

       destip = ntohl(destip);

       sessparams.SetOwnTimestampUnit((double)(1.0f/90000.0f));
       //sessparams.SetAcceptOwnPackets(true);
       transparams.SetPortbase(portbase);
       fprintf(stderr,"SetPortbass\n");
       status = sess.Create(sessparams,&transparams);
       fprintf(stderr, "Creat session\n");
       checkerror(status);

       RTPIPv4Address addr(destip,destport);


      status = sess.AddDestination(addr);

       fprintf(stderr,"Add to Destination\n");
       checkerror(status);
       currentTime = clock();
       sendBuffer = (unsigned char*)malloc(1000*1000);
       nalBuffer = (unsigned char*)malloc(1000*1000);
}


void rtp_send(unsigned char *buf, int len)
{
    int n    = len / MAX_RTP_PKT_LENGTH;
    int last = len % MAX_RTP_PKT_LENGTH;
    if (last > 0) n++;

    int timeInc;

    if (len > 0) fprintf(stderr, "send count:\n");
    unsigned char nalHead = buf[0];

    if (len < MAX_RTP_PKT_LENGTH)
    {
        // small NAL: send it in a single RTP packet
        status = sess.SendPacket((void *)buf, len, H264, true, timeStampInc);
    }
    else
    {
        // large NAL: fragment it FU-A style (type 28)
        for (int i = 0; i < n; i++)
        {
            sendBuffer[0] = (nalHead & 0x60) | 28;  // FU indicator: original NRI, type 28
            sendBuffer[1] = (nalHead & 0x1f);       // FU header: original NAL type

            if (0 == i)
            {
                timeInc = timeStampInc;
                sendBuffer[1] |= 0x80;              // S bit: first fragment
                // note: the first fragment also copies the original NAL header byte
                // into the payload; strict RFC 6184 FU-A would skip it (kept as-is here)
                memcpy(&sendBuffer[2], &buf[i * MAX_RTP_PKT_LENGTH], MAX_RTP_PKT_LENGTH);
                status = sess.SendPacket((void *)sendBuffer, MAX_RTP_PKT_LENGTH + 2, H264, false, timeInc);
            }
            else if (i == n - 1)                    // send the last fragment
            {
                timeInc = 0;
                sendBuffer[1] |= 0x40;              // E bit: last fragment
                memcpy(&sendBuffer[2], &buf[i * MAX_RTP_PKT_LENGTH], last);
                status = sess.SendPacket((void *)sendBuffer, last + 2, H264, true, timeInc);
            }
            else
            {
                timeInc = 0;
                memcpy(&sendBuffer[2], &buf[i * MAX_RTP_PKT_LENGTH], MAX_RTP_PKT_LENGTH);
                status = sess.SendPacket((void *)sendBuffer, MAX_RTP_PKT_LENGTH + 2, H264, false, timeInc);
            }

            checkerror(status);
            //sess.OnPollThreadStart();
            status = sess.Poll();
            //checkerror(status);
        }
    }
}
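For reference, the two bytes that rtp_send() builds in front of every fragment follow the FU-A layout from RFC 6184. Pulled out into a hypothetical helper (the project inlines this logic):

// Reference: the FU-A header bytes assembled by rtp_send() above
// (nalHead is the original NAL header byte).
//
//   FU indicator:  | F (1) | NRI (2) | type = 28 (5) |
//   FU header:     | S (1) | E (1)   | R (1) | original NAL type (5) |
//
static void build_fu_a_header(unsigned char nalHead, bool first, bool last,
                              unsigned char out[2])
{
    out[0] = (nalHead & 0x60) | 28;   // FU indicator: keep F+NRI, set type 28 (FU-A)
    out[1] = (nalHead & 0x1f);        // FU header: original NAL unit type
    if (first) out[1] |= 0x80;        // S bit on the first fragment
    if (last)  out[1] |= 0x40;        // E bit on the last fragment
}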

static void checkerror(int err)
{
    if (err < 0) {
        // print inside a single expression: RTPGetErrorString() returns a
        // temporary std::string, so its c_str() must not be stored for later use
        printf("Error:%s\n", RTPGetErrorString(err).c_str());
        exit(-1);
    }
}


void nal_rtp_send(unsigned char *buf, int len)
{
    int pos = 0;

    // walk the annex-B buffer NAL by NAL
    // (assumes 4-byte 00 00 00 01 start codes; x264 can also emit 3-byte ones)
    while (pos < len)
    {
        int nalLen = get_next_nalu(&buf[pos], len - pos, nalBuffer);
        rtp_send(nalBuffer + 4, nalLen - 4);   // drop the start code 00 00 00 01
        pos += nalLen;
    }
}


static int get_next_nalu(unsigned char *buf, int buf_len, unsigned char *outbuf)
{
    int pos = 0;
    bool findStartcode;

    if (buf_len > 4)
        findStartcode = find_start_code(&buf[pos]);
    else
        return 0;

    if (findStartcode)
    {
        // copy bytes (including the leading start code) until the next start code
        do
        {
            outbuf[pos] = buf[pos];
            pos++;
            findStartcode = find_start_code(&buf[pos]);
        } while (!findStartcode && (pos + 4 < buf_len));
    }

    if (findStartcode)
    {
        return pos;
    }
    else if (pos + 4 >= buf_len)
    {
        // last NAL in the buffer: copy the tail
        do { outbuf[pos] = buf[pos]; pos++; } while (pos < buf_len);
        return buf_len;
    }
    else
        return 0;
}

static bool find_start_code(unsigned char*buf)
{
  if( (buf[0] == 0)&&
      (buf[1] == 0)&&
      (buf[2] == 0)&&
      (buf[3] == 1)
          )
  {
    return true;
  }

  else
      return false;
}
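A quick way to sanity-check the splitter above is to feed it a hand-built annex-B buffer. A hypothetical smoke test, not part of the project (it would have to live in rtpsend.cpp, since the helpers are static):

// Hypothetical test for get_next_nalu(): two fake NALs with 4-byte start codes.
static void test_nal_splitting()
{
    unsigned char stream[] = {
        0x00, 0x00, 0x00, 0x01, 0x67, 0xAA, 0xBB,   // "SPS"-like NAL (type 7)
        0x00, 0x00, 0x00, 0x01, 0x65, 0xCC, 0xDD    // "IDR"-like NAL (type 5)
    };
    unsigned char out[32];
    int pos = 0, len = sizeof(stream);

    while (pos < len)
    {
        int nalLen = get_next_nalu(&stream[pos], len - pos, out);
        fprintf(stderr, "NAL of %d bytes, type %d\n", nalLen - 4, out[4] & 0x1f);
        pos += nalLen;
    }
}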


(4) Trimming down the hardware-encoding functions:

The hardware-encoding part comes from reference material provided by Samsung, which includes encode/decode APIs for all kinds of formats. I extracted just the H.264 encoding piece and added it to the project.


In essence, it abstracts out the following few functions:


#ifndef __SAMSUNG_SYSLSI_APDEV_CAM_ENC_DEC_TEST_H__
#define __SAMSUNG_SYSLSI_APDEV_CAM_ENC_DEC_TEST_H__




#ifdef __cplusplus
extern "C" {
#endif

void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num);

void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size);

void mfc_encoder_free(void *handle);

#ifdef __cplusplus
}
#endif


#endif /* __SAMSUNG_SYSLSI_APDEV_CAM_ENC_DEC_TEST_H__ */


Here is the implementation:

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <ctype.h>
#include <errno.h>
#include <signal.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <sys/ioctl.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include <semaphore.h>
#include "s3c_pp.h"
#include "SsbSipH264Encode.h"
#include "LogMsg.h"
#include "performance.h"
#include "MfcDriver.h"
#include "mfcapi.h"


/*
#define PP_DEV_NAME     "/dev/s3c-pp"
#define FB1_WIDTH       400
#define FB1_HEIGHT      480
#define FB1_BPP         16
#define FB1_COLOR_SPACE RGB16

#define SAMSUNG_UXGA_S5K3BA
*/

/***************** MFC *******************/
//static void    *enc_handle, *dec_handle;
//static int      enc_frame_cnt, dec_frame_cnt;

/* MFC functions */
//static void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num);
//static void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size);
//static void  mfc_encoder_free(void *handle);

/***************** etc *******************/
//#define SHARED_BUF_NUM                     5
//#define MFC_LINE_BUF_SIZE_PER_INSTANCE     (204800)

/***************** MFC driver function *****************/
void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num)
{
	int   frame_size;
	void *handle;
	int   ret;

	frame_size = (width * height * 3) >> 1;   // one I420 frame

	handle = SsbSipH264EncodeInit(width, height, frame_rate, bitrate, gop_num);
	if (handle == NULL) {
		LOG_MSG(LOG_ERROR, "Test_Encoder", "SsbSipH264EncodeInit Failed\n");
		return NULL;
	}

	// first Exe call right after init, as in Samsung's sample code
	ret = SsbSipH264EncodeExe(handle);

	return handle;
}

void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size)
{
	unsigned char	*p_inbuf, *p_outbuf;
	int				hdr_size;
	int				ret;


	p_inbuf = SsbSipH264EncodeGetInBuf(handle, 0);

	memcpy(p_inbuf, yuv_buf, frame_size);

	ret = SsbSipH264EncodeExe(handle);
	if (first_frame) {
		SsbSipH264EncodeGetConfig(handle, H264_ENC_GETCONF_HEADER_SIZE, &hdr_size);
		//printf("Header Size : %d\n", hdr_size);
	}

	p_outbuf = SsbSipH264EncodeGetOutBuf(handle, size);

	return p_outbuf;
}

void mfc_encoder_free(void *handle)
{
	SsbSipH264EncodeDeInit(handle);
}
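For completeness, a hedged sketch of the API's full lifecycle used standalone (i420_frame is an assumed caller-owned 320x240 I420 buffer; note that the threaded loop below never calls mfc_encoder_free()):

// Hedged sketch: one init/encode/free cycle with the extracted MFC API.
static void encode_one_frame_with_mfc(unsigned char *i420_frame)
{
    long  out_size = 0;
    void *h = mfc_encoder_init(320, 240, 25, 1000, 20);

    unsigned char *es = (unsigned char *)mfc_encoder_exe(h, i420_frame,
                                                         320 * 240 * 3 / 2,
                                                         1 /* first frame */,
                                                         &out_size);
    nal_rtp_send(es, out_size);   // ship the annex-B output, as compress_video() does

    mfc_encoder_free(h);
}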




Everything else is Samsung's own material, which you can dig into yourself.
Calling this API (in the encoding thread from the software-encoding section above, with the ffmpeg encode call swapped out):
void *compress_video(void *)
{
    int i = 0;
    int out_size;
    clock_t t1, t2, t3;

    while (1)
    {
        sem_wait(&fullFrameSpace);
        pthread_mutex_lock(&mutex);

        switch_time_t2 = t1 = clock();
        fprintf(stderr, "switch time cost:= %ld us\n", labs(switch_time_t1 - switch_time_t2));
        i = i % buf_size;

        YUV422_to_I420(yuv422BUf[i], picture->data, PICWIDTH, PICHEIGHT);

        //out_size = avcodec_encode_video(c,outbuf, outbuf_size, picture);
        // picture->data[0] points at the contiguous I420 buffer set up in codec_init(),
        // so the whole frame can be handed over in one memcpy inside mfc_encoder_exe()
        long oubuf_size_hard = 0;
        if (enc_frame_cnt % 20 == 0)
        {
            // first_frame = 1 makes mfc_encoder_exe() query the stream header size
            outbuf = (uint8_t *)mfc_encoder_exe(enc_handle, picture->data[0], PICWIDTH * PICHEIGHT * 3 / 2, 1, &oubuf_size_hard);
        }
        else
        {
            outbuf = (uint8_t *)mfc_encoder_exe(enc_handle, picture->data[0], PICWIDTH * PICHEIGHT * 3 / 2, 0, &oubuf_size_hard);
        }
        enc_frame_cnt++;

        //fprintf(stderr,"encode frame size: %d\n",out_size);
        picture->pts++;
        t2 = clock();
        fprintf(stderr, "encode time cost: %ld\n\n", (long)(t2 - t1) / 1000);

        nal_rtp_send((unsigned char *)outbuf, oubuf_size_hard);

        i++;
        t3 = clock();
        fprintf(stderr, "send time cost:%ld\n", (long)(t3 - t2) / 1000);

        pthread_mutex_unlock(&mutex);
        sem_post(&emptyFrameSpace);
        usleep(1000 * 10);
    }
}



That's all for now; I'll flesh it out with more detail when I have time.










