Exploring Multi-Instance RTMP Push Scenarios on the Android Platform

Many developers have asked how to implement multi-instance push on the Android platform. "Multi-instance push" can be understood in several ways:

  1. Multiple encoding pipelines, with separate instances pushing to different RTMP URLs (e.g., an Android capture board pushing two streams out at the same time);
  2. A single encoded stream, with multiple instances pushing it to different RTMP URLs (e.g., to separate RTMP servers on the intranet and on the public network);
  3. A mix of both: some instances encode locally while others take in already-encoded H.264/AAC data, each pushing to its own RTMP URL (hybrid push).

Most designs on the market are not flexible enough in this respect. The example below takes the "callback of encoded audio/video data on Android" approach: one instance pushes the originally encoded stream out over RTMP, the encoded data is called back to the upper layer, and a new publisher instance is then started to push it to a new RTMP URL (this is only a demo of the workflow; in practice the callback data can be fed to third-party systems such as GB28181 or other services):

The detailed workflow is as follows:

  1. Set the audio and video encoded data callbacks

Corresponding interfaces:

	/**
	 * Set Audio Encoded Data Callback.
	 *
	 * @param audio_encoded_data_callback: Audio Encoded Data Callback.
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherSetAudioEncodedDataCallback(long handle, Object audio_encoded_data_callback);

	/**
	 * Set Video Encoded Data Callback.
	 *
	 * @param video_encoded_data_callback: Video Encoded Data Callback.
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherSetVideoEncodedDataCallback(long handle, Object video_encoded_data_callback);
Register the callbacks:

libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());

libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());
  2. Implement PublisherAudioEncodedDataCallback and PublisherVideoEncodedDataCallback:
    class PublisherAudioEncodedDataCallback implements NTAudioDataCallback
    {
        private int audio_buffer_size = 0;
        private int param_info_size = 0;

        private ByteBuffer audio_buffer_ = null;
        private ByteBuffer parameter_info_ = null;

        @Override
        public ByteBuffer getAudioByteBuffer(int size)
        {
            //Log.i("getAudioByteBuffer", "size: " + size);

            if( size < 1 )
            {
                return null;
            }

            if ( size <= audio_buffer_size && audio_buffer_ != null )
            {
                return audio_buffer_;
            }

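            // Grow the buffer: add 512 bytes of headroom and round the size up to a 16-byte boundary before reallocating the direct buffer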
            audio_buffer_size = size + 512;
            audio_buffer_size = (audio_buffer_size+0xf) & (~0xf);

            audio_buffer_ = ByteBuffer.allocateDirect(audio_buffer_size);

            // Log.i("getAudioByteBuffer", "size: " + size + " buffer_size:" + audio_buffer_size);

            return audio_buffer_;
        }

        @Override
        public ByteBuffer getAudioParameterInfo(int size)
        {
            //Log.i("getAudioParameterInfo", "size: " + size);

            if(size < 1)
            {
                return null;
            }

            if ( size <= param_info_size &&  parameter_info_ != null )
            {
                return  parameter_info_;
            }

            param_info_size = size + 32;
            param_info_size = (param_info_size+0xf) & (~0xf);

            parameter_info_ = ByteBuffer.allocateDirect(param_info_size);

            //Log.i("getAudioParameterInfo", "size: " + size + " buffer_size:" + param_info_size);

            return parameter_info_;
        }

        public void onAudioDataCallback(int ret, int audio_codec_id, int sample_size, int is_key_frame, long timestamp, int sample_rate, int channel, int parameter_info_size, long reserve)
        {
            Log.i("onAudioDataCallback", "ret: " + ret + ", audio_codec_id: " + audio_codec_id + ", sample_size: " + sample_size + ", timestamp: " + timestamp +
            		",sample_rate:" + sample_rate + ",chn: " + channel + ", parameter_info_size:" + parameter_info_size);

            if ( audio_buffer_ == null)
                return;

            audio_buffer_.rewind();

            if ( ret == 0 && publisherHandle2 != 0 ) {
                libPublisher.SmartPublisherPostAudioEncodedData(publisherHandle2, audio_codec_id, audio_buffer_, sample_size, is_key_frame, timestamp, parameter_info_, parameter_info_size);
            }
        }
    }

    class PublisherVideoEncodedDataCallback implements NTVideoDataCallback
    {
        private int video_buffer_size = 0;

        private ByteBuffer video_buffer_ = null;

        @Override
        public ByteBuffer getVideoByteBuffer(int size)
        {
            //Log.i("getVideoByteBuffer", "size: " + size);

            if( size < 1 )
            {
                return null;
            }

            if ( size <= video_buffer_size &&  video_buffer_ != null )
            {
                return  video_buffer_;
            }

            video_buffer_size = size + 1024;
            video_buffer_size = (video_buffer_size+0xf) & (~0xf);

            video_buffer_ = ByteBuffer.allocateDirect(video_buffer_size);

            // Log.i("getVideoByteBuffer", "size: " + size + " buffer_size:" + video_buffer_size);

            return video_buffer_;
        }

        public void onVideoDataCallback(int ret, int video_codec_id, int sample_size, int is_key_frame, long timestamp, int width, int height, long presentation_timestamp)
        {
            Log.i("onVideoDataCallback", "ret: " + ret + ", video_codec_id: " + video_codec_id + ", sample_size: " + sample_size + ", is_key_frame: "+ is_key_frame +  ", timestamp: " + timestamp +
            		",width: " + width + ", height:" + height + ",presentation_timestamp:" + presentation_timestamp);

            if ( video_buffer_ == null)
                return;

            video_buffer_.rewind();

            if ( ret == 0 && publisherHandle2 !=0 ) {

                libPublisher.SmartPublisherPostVideoEncodedData(publisherHandle2, video_codec_id, video_buffer_, sample_size, is_key_frame, timestamp, presentation_timestamp);
            }
        }
    }
  3. Interfaces for starting and stopping the encoded data output (a minimal call-order sketch follows the declarations below):
	/**
	 * Start output Encoded Data (used for the encoded audio/video data callback)
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherStartOutputEncodedData(long handle);

	/**
	 *  Stop output Encoded Data
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherStopOutputEncodedData(long handle);
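For reference, this is roughly the order in which the calls above are used (a minimal sketch, assuming publisherHandle was already created and configured via SmartPublisherOpen/InitAndSetConfig, as in the demo below):

    // Register the encoded-data callbacks on the existing publisher instance
    libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());
    libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());

    // Start delivering encoded frames to onAudioDataCallback()/onVideoDataCallback()
    if (libPublisher.SmartPublisherStartOutputEncodedData(publisherHandle) != 0) {
        Log.e(TAG, "Failed to start encoded data output.");
    }

    // ... encoded frames are relayed while the callback output is active ...

    // Stop the callback output when it is no longer needed
    libPublisher.SmartPublisherStopOutputEncodedData(publisherHandle);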
  4. Upper-layer demo call example:
    class ButtonEncodedDataCallbackListener implements OnClickListener {
        public void onClick(View v) {
            if (isEncodedDatacallbackRunning) {
                stopEncodedDataCallback();

                if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
                    ConfigControlEnable(true);
                }

                btnEncodedDataCallback.setText("Start encoded data callback");
                isEncodedDatacallbackRunning = false;

                if (publisherHandle2 != 0) {
                    libPublisher.SmartPublisherStopPublisher(publisherHandle2);
                    libPublisher.SmartPublisherClose(publisherHandle2);
                    publisherHandle2 = 0;
                }

                return;
            }

            Log.i(TAG, "onClick start encoded data callback..");

            if (libPublisher == null)
                return;

            if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
                InitAndSetConfig();
            }

            libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());
            libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());

            int startRet = libPublisher.SmartPublisherStartOutputEncodedData(publisherHandle);
            if (startRet != 0) {
                isEncodedDatacallbackRunning = false;

                Log.e(TAG, "Failed to start encoded data callback.");
                return;
            }

            if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
                if (pushType == 0 || pushType == 1) {
                    CheckInitAudioRecorder();    //enable pure video publisher..
                }

                ConfigControlEnable(false);
            }

            btnEncodedDataCallback.setText("Stop encoded data callback");
            isEncodedDatacallbackRunning = true;

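            // audio_opt / video_opt = 2: in this demo the relay instance is fed already-encoded data
            // via the Post*EncodedData interfaces instead of capturing and encoding on its own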
            int audio_opt = 2;
            int video_opt = 2;

            publisherHandle2 = libPublisher.SmartPublisherOpen(myContext, audio_opt, video_opt,
                    videoWidth, videoHeight);

            if (publisherHandle2 == 0) {
                Log.e(TAG, "sdk open failed!");
                return;
            }

            String relayUrl = "rtmp://player.daniulive.com:1935/hls/stream8888";

            libPublisher.SmartPublisherSetURL(publisherHandle2, relayUrl);

            libPublisher.SmartPublisherStartPublisher(publisherHandle2);
        }
    }

    // Stop the encoded data callback
    private void stopEncodedDataCallback() {
        if(!isEncodedDatacallbackRunning)
        {
            return;
        }
        if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
            if (audioRecord_ != null) {
                Log.i(TAG, "stopRecorder, call audioRecord_.StopRecording..");

                audioRecord_.Stop();

                if (audioRecordCallback_ != null) {
                    audioRecord_.RemoveCallback(audioRecordCallback_);
                    audioRecordCallback_ = null;
                }

                audioRecord_ = null;
            }
        }

        if (libPublisher != null) {
            libPublisher.SmartPublisherStopOutputEncodedData(publisherHandle);
        }

        if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
            if (publisherHandle != 0) {
                if (libPublisher != null) {
                    libPublisher.SmartPublisherClose(publisherHandle);
                    publisherHandle = 0;
                }
            }
        }
    }

The demo above starts a second push instance (with its own handle, publisherHandle2) purely to demonstrate the multi-instance effect: the encoded audio/video data is fed into the new instance through the encoded-data post interfaces and pushed out over RTMP again, which achieves multi-instance push.
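To recap, the lifecycle of the relay (second) instance, pieced together from the demo above, looks roughly like this (the URL and dimensions are the demo's placeholders):

    // Open a second publisher instance; in this demo it is fed already-encoded data (opt = 2)
    long publisherHandle2 = libPublisher.SmartPublisherOpen(myContext, 2, 2, videoWidth, videoHeight);

    // Point the relay instance at the second RTMP URL and start pushing
    libPublisher.SmartPublisherSetURL(publisherHandle2, "rtmp://player.daniulive.com:1935/hls/stream8888");
    libPublisher.SmartPublisherStartPublisher(publisherHandle2);

    // While running, the callbacks forward each encoded frame to the relay instance:
    //   SmartPublisherPostAudioEncodedData(publisherHandle2, ...);
    //   SmartPublisherPostVideoEncodedData(publisherHandle2, ...);

    // Teardown when the relay is no longer needed
    libPublisher.SmartPublisherStopPublisher(publisherHandle2);
    libPublisher.SmartPublisherClose(publisherHandle2);
    publisherHandle2 = 0;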
