Video Push Streaming
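
The three functions below share some encoder, RTMP and queue state that is declared elsewhere in the project. Those declarations are not shown in this post, so the following is only a rough sketch of what they might look like (the include paths, the PTHREAD_*_INITIALIZER choices and v_len are assumptions):

#include <stdint.h>
#include <pthread.h>
#include <x264.h>
#include "librtmp/rtmp.h"

// x264 encoder handle and reusable input/output pictures, filled by fireVideo
x264_t *video_encode_handle = NULL;
x264_picture_t pic_in, pic_out;

// plane sizes derived from the preview size: y_len = w*h, u_len = v_len = w*h/4
int y_len, u_len, v_len;

// streaming server URL and the timestamp base for all RTMP packets
char *rtmp_path = NULL;
uint32_t start_time = 0;

// mutex/condition protecting the RTMPPacket queue shared with push_thread
pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
pthread_cond_t cond = PTHREAD_COND_INITIALIZER;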

1. fireVideo: convert NV21 to YUV420P, encode with x264, and send the NALUs

JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_fireVideo
        (JNIEnv *env, jobject jobj, jbyteArray buffer){
    // Convert the camera frame from NV21 to YUV420P (I420)
    jbyte *nv21_buffer = (*env)->GetByteArrayElements(env, buffer, NULL);
    jbyte *u = (jbyte *) pic_in.img.plane[1];
    jbyte *v = (jbyte *) pic_in.img.plane[2];
    // Both NV21 and YUV420P are 4:2:0 formats (12 bits per pixel).
    // The Y plane is identical; only the chroma layout differs:
    //   NV21    = Y plane followed by interleaved VU pairs (V first)
    //   YUV420P = Y plane followed by a U plane and a V plane
    // Plane sizes: y = w*h, u = v = w*h/4
    memcpy(pic_in.img.plane[0], nv21_buffer, y_len);
    int i;
    for (i = 0; i < u_len; i++) {
        *(u + i) = *(nv21_buffer + y_len + i * 2 + 1);
        *(v + i) = *(nv21_buffer + y_len + i * 2);
    }

    // h264 encoding: the encoder fills an array of NALUs
    x264_nal_t *nal = NULL; // NAL units
    int n_nal = -1;         // number of NALUs produced
    if (x264_encoder_encode(video_encode_handle, &nal, &n_nal, &pic_in, &pic_out) < 0) {
        LOGE("%s", "encoding failed");
        (*env)->ReleaseByteArrayElements(env, buffer, nv21_buffer, 0);
        return;
    }
    // Send the h264 data to the streaming server over RTMP.
    // Frames are either key frames or ordinary frames; to improve error recovery,
    // every key frame is preceded by the SPS and PPS.
    int sps_len , pps_len;
    unsigned char sps[100];
    unsigned char pps[100];
    memset(sps,0,100);
    memset(pps,0,100);

    // Walk the NALU array and dispatch by NALU type
    for (i = 0; i < n_nal; i++) {
        if (nal[i].i_type == NAL_SPS) {
            // copy the SPS payload, skipping the 4-byte start code
            sps_len = nal[i].i_payload - 4;
            memcpy(sps, nal[i].p_payload + 4, sps_len);
        } else if (nal[i].i_type == NAL_PPS) {
            // copy the PPS payload, skipping the 4-byte start code
            pps_len = nal[i].i_payload - 4;
            memcpy(pps, nal[i].p_payload + 4, pps_len);

            // send the sequence header; key frames always come with SPS and PPS
            add_264_sequence_header(pps, sps, pps_len, sps_len);
        } else {
            // send the frame data itself
            add_264_body(nal[i].p_payload, nal[i].i_payload);
        }
    }
    (*env)->ReleaseByteArrayElements(env, buffer, nv21_buffer, 0);
}
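
fireVideo assumes that video_encode_handle, pic_in and the plane sizes y_len/u_len were already set up once the preview size became known. That initialization is not shown in this post; a minimal sketch of what it could look like (the function name setVideoOptions, the preset/tune strings and the rate-control choice are assumptions):

// Hypothetical setup, called once the preview width/height/bitrate/fps are known.
void setVideoOptions(int width, int height, int bitrate, int fps) {
    y_len = width * height;
    u_len = y_len / 4;
    v_len = u_len;

    x264_param_t param;
    // "ultrafast" + "zerolatency" is the usual choice for live streaming
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    param.i_csp = X264_CSP_I420; // fireVideo feeds YUV420P frames
    param.i_width = width;
    param.i_height = height;
    param.i_fps_num = fps;
    param.i_fps_den = 1;
    param.rc.i_rc_method = X264_RC_ABR;  // average-bitrate rate control (an assumption)
    param.rc.i_bitrate = bitrate / 1000; // x264 takes kbit/s
    x264_param_apply_profile(&param, "baseline");

    // allocate the reusable input picture and open the encoder
    x264_picture_alloc(&pic_in, param.i_csp, param.i_width, param.i_height);
    video_encode_handle = x264_encoder_open(&param);
    if (!video_encode_handle) {
        LOGE("%s", "failed to open the x264 encoder");
    }
}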

2. push_thread: connect to the RTMP server and send queued packets


void *push_thread(void * arg){
    // set up the RTMP connection
    RTMP *rtmp = RTMP_Alloc();
    if (!rtmp) {
        LOGE("%s", "failed to allocate RTMP object");
        goto end;
    }
    RTMP_Init(rtmp);
    rtmp->Link.timeout = 5; // connection timeout in seconds
    // set the streaming server URL
    RTMP_SetupURL(rtmp, rtmp_path);
    // we are publishing (writing) the stream
    RTMP_EnableWrite(rtmp);
    // connect to the server
    if (!RTMP_Connect(rtmp, NULL)) {
        LOGE("%s", "RTMP connect failed");
        goto end;
    }
    // timestamp base for all packets
    start_time = RTMP_GetTime();
    if (!RTMP_ConnectStream(rtmp, 0)) { // connect to the stream
        LOGE("%s", "RTMP connect stream failed");
        goto end;
    }
    for (;;) {
        // send loop
        pthread_mutex_lock(&mutex);
        // wait until the encoder signals that a packet has been queued
        pthread_cond_wait(&cond, &mutex);
        // take the first RTMPPacket off the queue
        RTMPPacket *packet = queue_get_first();
        if (packet) {
            queue_delete_first(); // remove it from the queue
            packet->m_nInfoField2 = rtmp->m_stream_id; // stream id assigned by the server
            // TRUE: hand the packet to librtmp's internal queue rather than forcing an immediate send
            int i = RTMP_SendPacket(rtmp, packet, TRUE);
            if (!i) {
                LOGE("%s", "RTMP connection lost");
                RTMPPacket_Free(packet); // frees the body
                free(packet);            // frees the struct allocated in add_264_body
                pthread_mutex_unlock(&mutex);
                goto end;
            }
            RTMPPacket_Free(packet);
            free(packet);
        }

        pthread_mutex_unlock(&mutex);
    }
    end:
    LOGI("%s", "releasing resources");
    if (rtmp) {
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    return 0;
}
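
push_thread consumes packets that add_rtmp_packet (called at the end of add_264_body below) appends to the queue. The queue helpers themselves (queue_get_first, queue_delete_first and the queue_append_last assumed here) are not listed in this post, so the producer side is only a sketch of the usual mutex/condition pattern:

// Producer side: enqueue an RTMPPacket and wake up push_thread.
void add_rtmp_packet(RTMPPacket *packet) {
    pthread_mutex_lock(&mutex);
    queue_append_last(packet);  // assumed helper, counterpart of queue_get_first/queue_delete_first
    pthread_cond_signal(&cond); // wake the consumer blocked in pthread_cond_wait
    pthread_mutex_unlock(&mutex);
}

Because push_thread waits on the condition before every dequeue, one signal is consumed per packet; a more robust loop would re-check the queue after waking instead of relying on a one-to-one signal/packet pairing.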

 

3. add_264_body: wrap an h264 NALU in an RTMP packet and queue it

/**
 * Send one h264 NALU as an RTMP video packet
 */
void add_264_body(unsigned char *buf, int len){
    // strip the Annex-B start code (delimiter)
    if (buf[2] == 0x00) {        // 00 00 00 01
        buf += 4;
        len -= 4;
    } else if (buf[2] == 0x01) { // 00 00 01
        buf += 3;
        len -= 3;
    }
    int body_size = len + 9;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    RTMPPacket_Alloc(packet,body_size);

    unsigned char *body = packet->m_body;
    // The low 5 bits of the NAL header byte give the NAL unit type;
    // type 5 means an IDR slice, i.e. a key frame.
    // e.g. 0x65 & 0x1f (00011111) = 5
    int type = buf[0] & 0x1f;
    // default: inter frame
    body[0] = 0x27; // VideoTagHeader: FrameType(2 = inter frame) + CodecID(7 = AVC)
    if (type == NAL_SLICE_IDR) {
        // IDR (key) frame
        body[0] = 0x17; // VideoTagHeader: FrameType(1 = key frame) + CodecID(7 = AVC)
    }
    body[1] = 0x01; // AVCPacketType = 1: AVC NALU
    body[2] = 0x00; // CompositionTime, 24 bit, always 0 here
    body[3] = 0x00;
    body[4] = 0x00;

    // NALU length as a 4-byte big-endian integer (AVCC length prefix)
    body[5] = (len >> 24) & 0xff;
    body[6] = (len >> 16) & 0xff;
    body[7] = (len >> 8) & 0xff;
    body[8] = (len) & 0xff;

    /*copy data*/
    memcpy(&body[9], buf, len);

    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = body_size;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO; // this packet carries video data
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    // timestamp of this tag relative to the first one, i.e. relative to start_time
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;
    add_rtmp_packet(packet);

}
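
add_264_sequence_header, which fireVideo calls when it sees a PPS NALU, is not listed here either. It builds the AVC sequence header (the AVCDecoderConfigurationRecord a player needs before it can decode any frame) and queues it like any other video packet. A sketch in the same style as add_264_body, keeping the (pps, sps, pps_len, sps_len) argument order used above:

/**
 * Send the AVC sequence header (SPS + PPS) - a sketch, not the original listing
 */
void add_264_sequence_header(unsigned char *pps, unsigned char *sps,
                             int pps_len, int sps_len) {
    // 5-byte video tag header + 11 fixed config bytes + the two parameter sets
    int body_size = 16 + sps_len + pps_len;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    RTMPPacket_Alloc(packet, body_size);

    unsigned char *body = packet->m_body;
    int i = 0;
    body[i++] = 0x17; // FrameType(1 = key frame) + CodecID(7 = AVC)
    body[i++] = 0x00; // AVCPacketType = 0: sequence header
    body[i++] = 0x00; // CompositionTime = 0 (24 bit)
    body[i++] = 0x00;
    body[i++] = 0x00;

    // AVCDecoderConfigurationRecord
    body[i++] = 0x01;   // configurationVersion
    body[i++] = sps[1]; // AVCProfileIndication
    body[i++] = sps[2]; // profile_compatibility
    body[i++] = sps[3]; // AVCLevelIndication
    body[i++] = 0xFF;   // reserved bits + lengthSizeMinusOne (4-byte NALU length prefix)

    body[i++] = 0xE1;   // reserved bits + numOfSequenceParameterSets = 1
    body[i++] = (sps_len >> 8) & 0xff; // SPS length, big-endian
    body[i++] = sps_len & 0xff;
    memcpy(&body[i], sps, sps_len);
    i += sps_len;

    body[i++] = 0x01;   // numOfPictureParameterSets = 1
    body[i++] = (pps_len >> 8) & 0xff; // PPS length, big-endian
    body[i++] = pps_len & 0xff;
    memcpy(&body[i], pps, pps_len);
    i += pps_len;

    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = body_size;
    packet->m_nTimeStamp = 0; // the sequence header goes out at timestamp 0
    packet->m_hasAbsTimestamp = 0;
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    add_rtmp_packet(packet);
}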

 
