PeerConnectionFactory 的初始化流程分析

----------------------------------------------------------------------------------------------------------------------------------------

一分鐘快速搭建 rtmpd 服務器: https://blog.csdn.net/freeabc/article/details/102880984

軟件下載地址: http://www.qiyicc.com/download/rtmpd.rar

github 地址:https://github.com/superconvert/smart_rtmpd

-----------------------------------------------------------------------------------------------------------------------------------------

PeerConnectionFactory 的初始化流程分析

//--------------------------------------------------------------------------------------------
// PeerConnectionFactory 的初始化流程分析
//--------------------------------------------------------------------------------------------
1. 

// Android entry point: the Activity constructs a PeerConnectionClient and
// then kicks off factory creation. (Excerpt from AppRTCMobile's CallActivity;
// the "::" is the article author's notation, not valid Java.)
void CallActivity::onCreate(Bundle savedInstanceState) {
    peerConnectionClient = new PeerConnectionClient(
        getApplicationContext(),
        eglBase,
        roomUri.toString(),
        peerConnectionParameters,
        CallActivity.this);
        
    // Asynchronously builds the PeerConnectionFactory (see step 3 below).
    peerConnectionClient.createPeerConnectionFactory(options);
}

2.

// NOTE(review): truncated excerpt — the constructor body's braces and other
// statements are omitted by the article; only the static initialize() call is
// shown. It performs one-time, process-wide WebRTC initialization.
PeerConnectionClient::PeerConnectionClient(Context appContext, EglBase eglBase,
    String roomServerUrl, PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events)
    PeerConnectionFactory.initialize(
        PeerConnectionFactory.InitializationOptions.builder(appContext)
        .setFieldTrials(fieldTrials)
        .setEnableInternalTracer(true)
        .createInitializationOptions());
        
        
// One-time global initialization: loads the native library, sets up Android
// globals and field trials, and optionally wires tracing/logging into native.
void PeerConnectionFactory::initialize(InitializationOptions options) {
    // Loads the JNI native library; the loader code itself is omitted here.
    NativeLibrary.initialize(options.nativeLibraryLoader, options.nativeLibraryName);
    nativeInitializeAndroidGlobals();
    nativeInitializeFieldTrials(options.fieldTrials);
    // Internal tracer is initialized at most once per process.
    if (options.enableInternalTracer && !internalTracerInitialized) {
        initializeInternalTracer();
    }
    // Optional: route native logging through a Java-supplied Loggable.
    if (options.loggable != null) {
        Logging.injectLoggable(options.loggable, options.loggableSeverity);
        nativeInjectLoggable(new JNILogging(options.loggable), options.loggableSeverity.ordinal());
    }
}

3.

// Public entry: guards against double construction, then hops onto the
// client's single-threaded executor to do the real work asynchronously.
void PeerConnectionClient::createPeerConnectionFactory(PeerConnectionFactory.Options options) {
    if (factory != null) {
        throw new IllegalStateException("PeerConnectionFactory has already been constructed");
    }
    executor.execute(() -> createPeerConnectionFactoryInternal(options));
}

// Runs on the executor thread: picks hardware (MediaCodec+EGL) or software
// video codec factories based on the call parameters, then builds the factory.
void PeerConnectionClient::createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) {
    final VideoEncoderFactory encoderFactory;
    final VideoDecoderFactory decoderFactory;

    if (peerConnectionParameters.videoCodecHwAcceleration) {
      encoderFactory = new DefaultVideoEncoderFactory(
          rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile);
      decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
    } else {
      encoderFactory = new SoftwareVideoEncoderFactory();
      decoderFactory = new SoftwareVideoDecoderFactory();
    }
    // adm (audio device module) is created elsewhere in PeerConnectionClient.
    factory = PeerConnectionFactory.builder()
        .setOptions(options)
        .setAudioDeviceModule(adm)
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory();
}

// Builder terminal step: funnels every configured factory (audio codecs,
// video codecs, APM, FEC/congestion-control/NetEq factories) into one JNI
// call. Optional factories are passed as 0 (null native pointer) when unset.
PeerConnectionFactory PeerConnectionFactory::createPeerConnectionFactory() {
    return nativeCreatePeerConnectionFactory(ContextUtils.getApplicationContext(), options,
        audioDeviceModule.getNativeAudioDeviceModulePointer(),
        audioEncoderFactoryFactory.createNativeAudioEncoderFactory(),
        audioDecoderFactoryFactory.createNativeAudioDecoderFactory(), videoEncoderFactory,
        videoDecoderFactory,
        audioProcessingFactory == null ? 0 : audioProcessingFactory.createNative(),
        fecControllerFactoryFactory == null ? 0 : fecControllerFactoryFactory.createNative(),
        networkControllerFactoryFactory == null
            ? 0 : networkControllerFactoryFactory.createNativeNetworkControllerFactory(),
        networkStatePredictorFactoryFactory == null
            ? 0 : networkStatePredictorFactoryFactory.createNativeNetworkStatePredictorFactory(),
        mediaTransportFactoryFactory == null
            ? 0 : mediaTransportFactoryFactory.createNativeMediaTransportFactory(),
        neteqFactoryFactory == null ? 0 : neteqFactoryFactory.createNativeNetEqFactory());
}
// Auto-generated JNI stub: wraps raw jobjects in JavaParamRefs and forwards
// the jlong native pointers unchanged to the hand-written implementation.
JNI_GENERATOR_EXPORT jobject
    Java_org_webrtc_PeerConnectionFactory_nativeCreatePeerConnectionFactory(
    JNIEnv* env,
    jclass jcaller,
    jobject context,
    jobject options,
    jlong nativeAudioDeviceModule,
    jlong audioEncoderFactory,
    jlong audioDecoderFactory,
    jobject encoderFactory,
    jobject decoderFactory,
    jlong nativeAudioProcessor,
    jlong nativeFecControllerFactory,
    jlong nativeNetworkControllerFactory,
    jlong nativeNetworkStatePredictorFactory,
    jlong mediaTransportFactory,
    jlong neteqFactory) {
  // .Release() hands the local reference back to the JVM as the return value.
  return JNI_PeerConnectionFactory_CreatePeerConnectionFactory(env,
      base::android::JavaParamRef<jobject>(env, context), base::android::JavaParamRef<jobject>(env,
      options), nativeAudioDeviceModule, audioEncoderFactory, audioDecoderFactory,
      base::android::JavaParamRef<jobject>(env, encoderFactory),
      base::android::JavaParamRef<jobject>(env, decoderFactory), nativeAudioProcessor,
      nativeFecControllerFactory, nativeNetworkControllerFactory,
      nativeNetworkStatePredictorFactory, mediaTransportFactory, neteqFactory).Release();
}

./sdk/android/src/jni/pc/peer_connection_factory.cc

// Converts the jlong handles passed up from Java back into typed native
// objects (taking ownership where appropriate) and delegates to
// CreatePeerConnectionFactoryForJava, which does the real assembly work.
static ScopedJavaLocalRef<jobject>
JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
    JNIEnv* jni,
    const JavaParamRef<jobject>& jcontext,
    const JavaParamRef<jobject>& joptions,
    jlong native_audio_device_module,
    jlong native_audio_encoder_factory,
    jlong native_audio_decoder_factory,
    const JavaParamRef<jobject>& jencoder_factory,
    const JavaParamRef<jobject>& jdecoder_factory,
    jlong native_audio_processor,
    jlong native_fec_controller_factory,
    jlong native_network_controller_factory,
    jlong native_network_state_predictor_factory,
    jlong native_media_transport_factory,
    jlong native_neteq_factory) {
  rtc::scoped_refptr<AudioProcessing> audio_processor =
      reinterpret_cast<AudioProcessing*>(native_audio_processor);
  return CreatePeerConnectionFactoryForJava(
      jni, jcontext, joptions,
      reinterpret_cast<AudioDeviceModule*>(native_audio_device_module),
      TakeOwnershipOfRefPtr<AudioEncoderFactory>(native_audio_encoder_factory),
      TakeOwnershipOfRefPtr<AudioDecoderFactory>(native_audio_decoder_factory),
      jencoder_factory, jdecoder_factory,
      // Fall back to a default AudioProcessing if Java supplied none (0).
      audio_processor ? audio_processor : CreateAudioProcessing(),
      TakeOwnershipOfUniquePtr<FecControllerFactoryInterface>(
          native_fec_controller_factory),
      TakeOwnershipOfUniquePtr<NetworkControllerFactoryInterface>(
          native_network_controller_factory),
      TakeOwnershipOfUniquePtr<NetworkStatePredictorFactoryInterface>(
          native_network_state_predictor_factory),
      TakeOwnershipOfUniquePtr<MediaTransportFactory>(
          native_media_transport_factory),
      TakeOwnershipOfUniquePtr<NetEqFactory>(native_neteq_factory));
}

//----------------------------------------------------------------------------------
// 這個流程裏,初始化很多有用的東西,有 socket server, 音視頻編解碼,fec 工廠,
// 任務隊列等
//----------------------------------------------------------------------------------
./sdk/android/src/jni/pc/peer_connection_factory.cc

// Core assembly routine for the Android factory. It creates the three WebRTC
// threads (network / worker / signaling), fills in the factory and media
// engine dependency structs, and wraps the resulting native factory for Java.
ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
    JNIEnv* jni,
    const JavaParamRef<jobject>& jcontext,
    const JavaParamRef<jobject>& joptions,
    rtc::scoped_refptr<AudioDeviceModule> audio_device_module,
    rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
    rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
    const JavaParamRef<jobject>& jencoder_factory,
    const JavaParamRef<jobject>& jdecoder_factory,
    rtc::scoped_refptr<AudioProcessing> audio_processor,
    std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory,
    std::unique_ptr<NetworkControllerFactoryInterface>
        network_controller_factory,
    std::unique_ptr<NetworkStatePredictorFactoryInterface>
        network_state_predictor_factory,
    std::unique_ptr<MediaTransportFactory> media_transport_factory,
    std::unique_ptr<NetEqFactory> neteq_factory) {
  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
  // ThreadManager only WrapCurrentThread()s the thread where it is first
  // created.  Since the semantics around when auto-wrapping happens in
  // webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
  // think about ramifications of auto-wrapping there.
  rtc::ThreadManager::Instance()->WrapCurrentThread();

  //------------------------------------------
  // The network thread owns the socket server object that drives all I/O.
  //------------------------------------------
  std::unique_ptr<rtc::Thread> network_thread = rtc::Thread::CreateWithSocketServer();
  network_thread->SetName("network_thread", nullptr);
  RTC_CHECK(network_thread->Start()) << "Failed to start thread";

  std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
  worker_thread->SetName("worker_thread", nullptr);
  RTC_CHECK(worker_thread->Start()) << "Failed to start thread";

  std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
  signaling_thread->SetName("signaling_thread", NULL);
  RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";

  rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;

  //------------------------------------------
  // Convert the Java-side Options object into native factory options.
  //------------------------------------------
  const absl::optional<PeerConnectionFactoryInterface::Options> options =
      JavaToNativePeerConnectionFactoryOptions(jni, joptions);

  // Create the network monitor factory unless options were provided with
  // disable_network_monitor explicitly set to true.
  if (!(options && options->disable_network_monitor)) {
    network_monitor_factory = new AndroidNetworkMonitorFactory();
    rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
  }

  PeerConnectionFactoryDependencies dependencies;
  dependencies.network_thread = network_thread.get();
  dependencies.worker_thread = worker_thread.get();
  dependencies.signaling_thread = signaling_thread.get();
  //------------------------------------------
  // Task queue factory.
  //------------------------------------------
  dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
  //------------------------------------------
  // Call object factory.
  //------------------------------------------
  dependencies.call_factory = CreateCallFactory();
  dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>(
      dependencies.task_queue_factory.get());
  //------------------------------------------
  // FEC controller factory (may be null if Java supplied none).
  //------------------------------------------
  dependencies.fec_controller_factory = std::move(fec_controller_factory);
  dependencies.network_controller_factory = std::move(network_controller_factory);
  dependencies.network_state_predictor_factory = std::move(network_state_predictor_factory);
  dependencies.media_transport_factory = std::move(media_transport_factory);
  dependencies.neteq_factory = std::move(neteq_factory);

  cricket::MediaEngineDependencies media_dependencies;
  media_dependencies.task_queue_factory = dependencies.task_queue_factory.get();
  media_dependencies.adm = std::move(audio_device_module);
  //------------------------------------------
  // Audio encoder factory was already created at the Java layer.
  //------------------------------------------
  media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory);
  //------------------------------------------
  // Audio decoder factory was already created at the Java layer.
  //------------------------------------------
  media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory);
  media_dependencies.audio_processing = std::move(audio_processor);
  //------------------------------------------
  // Build the native video encoder factory (wraps the Java factory).
  //------------------------------------------
  media_dependencies.video_encoder_factory =
      absl::WrapUnique(CreateVideoEncoderFactory(jni, jencoder_factory));
  //------------------------------------------
  // Build the native video decoder factory (wraps the Java factory).
  //------------------------------------------
  media_dependencies.video_decoder_factory =
      absl::WrapUnique(CreateVideoDecoderFactory(jni, jdecoder_factory));
  dependencies.media_engine =
      cricket::CreateMediaEngine(std::move(media_dependencies));

  //------------------------------------------    
  // Create the PeerConnectionFactory itself.
  //------------------------------------------
  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory =
      CreateModularPeerConnectionFactory(std::move(dependencies));

  RTC_CHECK(factory) << "Failed to create the peer connection factory; "
                     << "WebRTC/libjingle init likely failed on this device";
  // TODO(honghaiz): Maybe put the options as the argument of
  // CreatePeerConnectionFactory.
  if (options)
    factory->SetOptions(*options);

  // Hand thread ownership over to the Java wrapper object.
  return NativeToScopedJavaPeerConnectionFactory(
      jni, factory, std::move(network_thread), std::move(worker_thread),
      std::move(signaling_thread), network_monitor_factory);
}

上面的流程中,我們就其中相關的變量具體講解一下

3.1 

network_thread  就是我們的網絡線程,內含 socket server 對象,並自帶線程驅動機制,具體運作機制參考博文裏的分析

https://blog.csdn.net/freeabc/article/details/106000923

3.2

CreateDefaultTaskQueueFactory 就是創建一個任務隊列工廠, 這個根據操作系統類型,分爲好幾種

我們就以 std 的爲例吧,其中我們的 pacer 發送隊列用到的就是這個

./api/task_queue/default_task_queue_factory_stdlib.cc

// stdlib flavor of the default task-queue factory (selected per platform
// at build time); simply forwards to the stdlib implementation.
std::unique_ptr<TaskQueueFactory> CreateDefaultTaskQueueFactory() {
  return CreateTaskQueueStdlibFactory();
}

./rtc_base/task_queue_stdlib.cc

// Factory function for the std::thread-based task queue implementation.
std::unique_ptr<TaskQueueFactory> CreateTaskQueueStdlibFactory() {
  return std::make_unique<TaskQueueStdlibFactory>();
}

./rtc_base/task_queue_stdlib.cc

// Concrete TaskQueueFactory that produces TaskQueueStdlib instances, mapping
// the abstract queue priority onto a thread priority.
class TaskQueueStdlibFactory final : public TaskQueueFactory {
 public:
  std::unique_ptr<TaskQueueBase, TaskQueueDeleter> CreateTaskQueue(
      absl::string_view name,
      Priority priority) const override {
    return std::unique_ptr<TaskQueueBase, TaskQueueDeleter>(
        new TaskQueueStdlib(name, TaskQueuePriorityToThreadPriority(priority)));
  }
};

3.3

CreateCallFactory 呼叫對象類廠分析

./call/call_factory.cc

// Creates the default factory used to construct Call objects later.
std::unique_ptr<CallFactoryInterface> CreateCallFactory() {
  return std::unique_ptr<CallFactoryInterface>(new CallFactory());
}

後續具體創建一個 call 對象,其實就是調用的  Call::Create(config);

// Creates a Call; if field trials configure simulated network degradation in
// either direction, the Call is wrapped in a DegradedCall instead.
Call* CallFactory::CreateCall(const Call::Config& config) {
  absl::optional<webrtc::BuiltInNetworkBehaviorConfig> send_degradation_config =
      ParseDegradationConfig(true);
  absl::optional<webrtc::BuiltInNetworkBehaviorConfig>
      receive_degradation_config = ParseDegradationConfig(false);

  if (send_degradation_config || receive_degradation_config) {
    return new DegradedCall(std::unique_ptr<Call>(Call::Create(config)),
                            send_degradation_config, receive_degradation_config,
                            config.task_queue_factory);
  }

  return Call::Create(config);
}

3.4 創建視頻編碼器類廠 CreateVideoEncoderFactory

./sdk/android/src/jni/pc/video.cc

// Wraps the Java VideoEncoderFactory in a native adapter; returns nullptr if
// the Java side passed no factory.
VideoEncoderFactory* CreateVideoEncoderFactory(
    JNIEnv* jni,
    const JavaRef<jobject>& j_encoder_factory) {
  return IsNull(jni, j_encoder_factory)
             ? nullptr
             : new VideoEncoderFactoryWrapper(jni, j_encoder_factory);
}

./sdk/android/src/jni/video_encoder_factory_wrapper.cc

// Queries the Java factory once, up front, for its supported codecs and
// implementations, converting both lists into native SdpVideoFormat vectors.
VideoEncoderFactoryWrapper::VideoEncoderFactoryWrapper(
    JNIEnv* jni,
    const JavaRef<jobject>& encoder_factory)
    : encoder_factory_(jni, encoder_factory) {
  const ScopedJavaLocalRef<jobjectArray> j_supported_codecs =
      Java_VideoEncoderFactory_getSupportedCodecs(jni, encoder_factory);
  supported_formats_ = JavaToNativeVector<SdpVideoFormat>(
      jni, j_supported_codecs, &VideoCodecInfoToSdpVideoFormat);
  const ScopedJavaLocalRef<jobjectArray> j_implementations =
      Java_VideoEncoderFactory_getImplementations(jni, encoder_factory);
  implementations_ = JavaToNativeVector<SdpVideoFormat>(
      jni, j_implementations, &VideoCodecInfoToSdpVideoFormat);
}

創建視頻編碼器接口

// Asks the Java factory to create an encoder for |format|; returns nullptr
// when the Java side cannot provide one.
std::unique_ptr<VideoEncoder> VideoEncoderFactoryWrapper::CreateVideoEncoder(
    const SdpVideoFormat& format) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedJavaLocalRef<jobject> j_codec_info =
      SdpVideoFormatToVideoCodecInfo(jni, format);
  ScopedJavaLocalRef<jobject> encoder = Java_VideoEncoderFactory_createEncoder(
      jni, encoder_factory_, j_codec_info);
  if (!encoder.obj())
    return nullptr;
  return JavaToNativeVideoEncoder(jni, encoder);
}

3.5 創建解碼器類廠  CreateVideoDecoderFactory

./sdk/android/src/jni/pc/video.cc

// Wraps the Java VideoDecoderFactory in a native adapter; returns nullptr if
// the Java side passed no factory.
VideoDecoderFactory* CreateVideoDecoderFactory(
    JNIEnv* jni,
    const JavaRef<jobject>& j_decoder_factory) {
  return IsNull(jni, j_decoder_factory)
             ? nullptr
             : new VideoDecoderFactoryWrapper(jni, j_decoder_factory);
}

./sdk/android/src/jni/video_decoder_factory_wrapper.cc

// Only retains a global reference to the Java factory; unlike the encoder
// wrapper it does not pre-fetch the supported codec list.
VideoDecoderFactoryWrapper::VideoDecoderFactoryWrapper(
    JNIEnv* jni,
    const JavaRef<jobject>& decoder_factory)
    : decoder_factory_(jni, decoder_factory) {}

創建解碼器接口

// Asks the Java factory to create a decoder for |format|; returns nullptr
// when the Java side cannot provide one.
std::unique_ptr<VideoDecoder> VideoDecoderFactoryWrapper::CreateVideoDecoder(
    const SdpVideoFormat& format) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedJavaLocalRef<jobject> j_codec_info =
      SdpVideoFormatToVideoCodecInfo(jni, format);
  ScopedJavaLocalRef<jobject> decoder = Java_VideoDecoderFactory_createDecoder(
      jni, decoder_factory_, j_codec_info);
  if (!decoder.obj())
    return nullptr;  
  return JavaToNativeVideoDecoder(jni, decoder);
}

3.6 媒體引擎類廠的創建 CreateMediaEngine

./media/engine/webrtc_media_engine.cc

// Composes the media engine: always a voice engine, plus a real video engine
// when video support is compiled in (HAVE_WEBRTC_VIDEO) or a null stub
// otherwise. The result is a CompositeMediaEngine holding both.
std::unique_ptr<MediaEngineInterface> CreateMediaEngine(
    MediaEngineDependencies dependencies) {
  auto audio_engine = std::make_unique<WebRtcVoiceEngine>(
      dependencies.task_queue_factory, std::move(dependencies.adm),
      std::move(dependencies.audio_encoder_factory),
      std::move(dependencies.audio_decoder_factory),
      std::move(dependencies.audio_mixer),
      std::move(dependencies.audio_processing));
#ifdef HAVE_WEBRTC_VIDEO
  auto video_engine = std::make_unique<WebRtcVideoEngine>(
      std::move(dependencies.video_encoder_factory),
      std::move(dependencies.video_decoder_factory));
#else
  auto video_engine = std::make_unique<NullWebRtcVideoEngine>();
#endif
  return std::make_unique<CompositeMediaEngine>(std::move(audio_engine),
                                                std::move(video_engine));
}

我們看到 media_engine 是 CompositeMediaEngine 對象, 主要包含 audio_engine 和 video_engine

audio_engine 包含音頻的編碼類廠,解碼類廠,混音處理,聲音處理

video_engine 包含視頻的編碼類廠,解碼類廠

我們後續的代碼中經常出現的 media_engine() 其實就是 CompositeMediaEngine 對象

media_engine()->voice() 和 media_engine()->video() 分別代表上面的 WebRtcVoiceEngine 和 WebRtcVideoEngine

 

4. 產生 PeerConnectionFactory 對象 CreateModularPeerConnectionFactory

// Constructs the PeerConnectionFactory, runs Initialize() synchronously on
// the signaling thread, and returns a thread-safe proxy to the factory.
rtc::scoped_refptr<PeerConnectionFactoryInterface>
CreateModularPeerConnectionFactory(
    PeerConnectionFactoryDependencies dependencies) {
  rtc::scoped_refptr<PeerConnectionFactory> pc_factory(
      new rtc::RefCountedObject<PeerConnectionFactory>(
          std::move(dependencies)));
  // Call Initialize synchronously but make sure it is executed on
  // |signaling_thread|.
  MethodCall<PeerConnectionFactory, bool> call(
      pc_factory.get(), &PeerConnectionFactory::Initialize);
  bool result = call.Marshal(RTC_FROM_HERE, pc_factory->signaling_thread());

  if (!result) {
    return nullptr;
  }
  // The proxy marshals every API call onto the signaling thread.
  return PeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(),
                                            pc_factory);
}

其實這個裏面做了兩件事, PeerConnectionFactory 構造與初始化

// Takes ownership of all injected dependencies. For any of the three threads
// that were not supplied, the factory creates (and owns) its own; a missing
// signaling thread falls back to wrapping the current thread.
PeerConnectionFactory::PeerConnectionFactory(
    PeerConnectionFactoryDependencies dependencies)
    : wraps_current_thread_(false),
      network_thread_(dependencies.network_thread),
      worker_thread_(dependencies.worker_thread),
      signaling_thread_(dependencies.signaling_thread),
      task_queue_factory_(std::move(dependencies.task_queue_factory)),
      media_engine_(std::move(dependencies.media_engine)),
      call_factory_(std::move(dependencies.call_factory)),
      event_log_factory_(std::move(dependencies.event_log_factory)),
      fec_controller_factory_(std::move(dependencies.fec_controller_factory)),
      network_state_predictor_factory_(
          std::move(dependencies.network_state_predictor_factory)),
      injected_network_controller_factory_(
          std::move(dependencies.network_controller_factory)),
      media_transport_factory_(std::move(dependencies.media_transport_factory)),
      neteq_factory_(std::move(dependencies.neteq_factory)),
      trials_(dependencies.trials ? std::move(dependencies.trials)
                                  : std::make_unique<FieldTrialBasedConfig>()) {
  if (!network_thread_) {
    // No network thread injected: create one with its own socket server.
    owned_network_thread_ = rtc::Thread::CreateWithSocketServer();
    owned_network_thread_->SetName("pc_network_thread", nullptr);
    owned_network_thread_->Start();
    network_thread_ = owned_network_thread_.get();
  }

  if (!worker_thread_) {
    owned_worker_thread_ = rtc::Thread::Create();
    owned_worker_thread_->SetName("pc_worker_thread", nullptr);
    owned_worker_thread_->Start();
    worker_thread_ = owned_worker_thread_.get();
  }

  if (!signaling_thread_) {
    signaling_thread_ = rtc::Thread::Current();
    if (!signaling_thread_) {
      // If this thread isn't already wrapped by an rtc::Thread, create a
      // wrapper and own it in this class.
      signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread();
      wraps_current_thread_ = true;
    }
  }
}
// Runs on the signaling thread. Creates the network manager, the default
// packet socket factory (bound to the network thread), and the channel
// manager that owns the media engine.
bool PeerConnectionFactory::Initialize() {
  RTC_DCHECK(signaling_thread_->IsCurrent());
  rtc::InitRandom(rtc::Time32());

  // Enumerates and tracks local network interfaces/addresses.
  default_network_manager_.reset(new rtc::BasicNetworkManager());
  if (!default_network_manager_) {
    return false;
  }

  // All sockets the factory creates go through this, on the network thread.
  default_socket_factory_.reset(
      new rtc::BasicPacketSocketFactory(network_thread_));
  if (!default_socket_factory_) {
    return false;
  }

  // Channel manager owns the media engine and the RTP data engine.
  channel_manager_ = std::make_unique<cricket::ChannelManager>(
      std::move(media_engine_), std::make_unique<cricket::RtpDataEngine>(),
      worker_thread_, network_thread_);

  channel_manager_->SetVideoRtxEnabled(true);
  if (!channel_manager_->Init()) {
    return false;
  }

  return true;
}

初始化接口很重要,這裏面會產生 default_network_manager_( 網絡管理 ), default_socket_factory_(socket 類廠 ), 

channel_manager_(通道管理)

default_network_manager_ : 主要是網絡類型,網絡地址等信息的管理

default_socket_factory_ : 我們用到的 socket 的通訊都是通過它進行創建的

channel_manager_ :  呼叫通道管理,包含媒體引擎 media_engine 和 data_engine

./pc/channel_manager.cc

// Stores the engines and the three threads it operates across; the calling
// thread (the signaling thread here) becomes main_thread_.
ChannelManager::ChannelManager(
    std::unique_ptr<MediaEngineInterface> media_engine,
    std::unique_ptr<DataEngineInterface> data_engine,
    rtc::Thread* worker_thread,
    rtc::Thread* network_thread)
    : media_engine_(std::move(media_engine)),
      data_engine_(std::move(data_engine)),
      main_thread_(rtc::Thread::Current()),
      worker_thread_(worker_thread),
      network_thread_(network_thread) {
  RTC_DCHECK(data_engine_);
  RTC_DCHECK(worker_thread_);
  RTC_DCHECK(network_thread_);
}

通道管理最重要的作用是爲一路呼叫創建 VoiceChannel (同時創建 VoiceMediaChannel) 和 

VideoChannel (同時創建 VideoMediaChannel)。

 

下面以 VideoChannel 爲例,講一下,這個裏面包含什麼對象:

VideoChannel 包含發送參數,接收參數,VideoMediaChannel ( WebRtcVideoChannel ), 帶寬估算信息,

本地流參數,遠端流參數,以及 rtp_transport_ ( P2PTransportChannel 傳輸相關的處理 )

 

VideoMediaChannel 其實就是 WebRtcVideoChannel 對象,這個裏面包含了,發送流對象,接收流對象,編碼類廠,

解碼類廠, 碼率分配類廠, 發送 rtp 擴展,接收 rtp 擴展, 發送狀態統計,接收狀態統計,帶寬估算統計,發送和接收的

編碼信息統計等諸多信息,  是個需要關注的重點類!!!!!!

./media/engine/webrtc_video_engine.h

// Per-call video media channel. Holds the codec factories, the Call pointer,
// and field-trial-driven policies for packets with unknown SSRCs; on
// construction it also computes the receive codec list from the encoder
// factory's supported payload types.
WebRtcVideoChannel::WebRtcVideoChannel(
    webrtc::Call* call,
    const MediaConfig& config,
    const VideoOptions& options,
    const webrtc::CryptoOptions& crypto_options,
    webrtc::VideoEncoderFactory* encoder_factory,
    webrtc::VideoDecoderFactory* decoder_factory,
    webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory)
    : VideoMediaChannel(config),
      worker_thread_(rtc::Thread::Current()),
      call_(call),
      unsignalled_ssrc_handler_(&default_unsignalled_ssrc_handler_),
      video_config_(config.video),
      encoder_factory_(encoder_factory),
      decoder_factory_(decoder_factory),
      bitrate_allocator_factory_(bitrate_allocator_factory),
      default_send_options_(options),
      last_stats_log_ms_(-1),
      discard_unknown_ssrc_packets_(webrtc::field_trial::IsEnabled(
          "WebRTC-Video-DiscardPacketsWithUnknownSsrc")),
      crypto_options_(crypto_options),
      unknown_ssrc_packet_buffer_(
          webrtc::field_trial::IsEnabled(
              "WebRTC-Video-BufferPacketsWithUnknownSsrc")
              ? new UnhandledPacketsBuffer()
              : nullptr) {
  RTC_DCHECK(thread_checker_.IsCurrent());

  rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
  sending_ = false;
  // Derive the receive codec set from what the encoder factory supports.
  recv_codecs_ =
      MapCodecs(AssignPayloadTypesAndDefaultCodecs(encoder_factory_));
  recv_flexfec_payload_type_ = recv_codecs_.front().flexfec_payload_type;
}

一個  WebRtcVideoChannel 包含多個發送對象 ( WebRtcVideoSendStream ) 和接收對象 ( WebRtcVideoReceiveStream )

這兩個對象也非常關鍵,需要重點分析這些類,才能更好的理解 webrtc 的運作機制, 其實就是封裝了整個視頻的

發送 pipeline ( capture ---> encoder ----> pacer ----> socket )

// One outgoing video stream. The constructor wires SSRCs (primary, RTX,
// FlexFEC), RTP header extensions, RTCP mode and MID from the stream/send
// parameters into the stream config, then applies the initial codec.
WebRtcVideoChannel::WebRtcVideoSendStream::WebRtcVideoSendStream(
    webrtc::Call* call,
    const StreamParams& sp,
    webrtc::VideoSendStream::Config config,
    const VideoOptions& options,
    bool enable_cpu_overuse_detection,
    int max_bitrate_bps,
    const absl::optional<VideoCodecSettings>& codec_settings,
    const absl::optional<std::vector<webrtc::RtpExtension>>& rtp_extensions,
    // TODO(deadbeef): Don't duplicate information between send_params,
    // rtp_extensions, options, etc.
    const VideoSendParameters& send_params)
    : worker_thread_(rtc::Thread::Current()),
      ssrcs_(sp.ssrcs),
      ssrc_groups_(sp.ssrc_groups),
      call_(call),
      enable_cpu_overuse_detection_(enable_cpu_overuse_detection),
      source_(nullptr),
      stream_(nullptr),
      encoder_sink_(nullptr),
      parameters_(std::move(config), options, max_bitrate_bps, codec_settings),
      rtp_parameters_(CreateRtpParametersWithEncodings(sp)),
      sending_(false) {
  // Maximum packet size may come in RtpConfig from external transport, for
  // example from QuicTransportInterface implementation, so do not exceed
  // given max_packet_size.
  parameters_.config.rtp.max_packet_size =
      std::min<size_t>(parameters_.config.rtp.max_packet_size, kVideoMtu);
  parameters_.conference_mode = send_params.conference_mode;

  sp.GetPrimarySsrcs(&parameters_.config.rtp.ssrcs);

  // ValidateStreamParams should prevent this from happening.
  RTC_CHECK(!parameters_.config.rtp.ssrcs.empty());
  rtp_parameters_.encodings[0].ssrc = parameters_.config.rtp.ssrcs[0];

  // RTX.
  sp.GetFidSsrcs(parameters_.config.rtp.ssrcs,
                 &parameters_.config.rtp.rtx.ssrcs);

  // FlexFEC SSRCs.
  // TODO(brandtr): This code needs to be generalized when we add support for
  // multistream protection.
  if (IsFlexfecFieldTrialEnabled()) {
    uint32_t flexfec_ssrc;
    bool flexfec_enabled = false;
    for (uint32_t primary_ssrc : parameters_.config.rtp.ssrcs) {
      if (sp.GetFecFrSsrc(primary_ssrc, &flexfec_ssrc)) {
        if (flexfec_enabled) {
          RTC_LOG(LS_INFO)
              << "Multiple FlexFEC streams in local SDP, but "
                 "our implementation only supports a single FlexFEC "
                 "stream. Will not enable FlexFEC for proposed "
                 "stream with SSRC: "
              << flexfec_ssrc << ".";
          continue;
        }

        flexfec_enabled = true;
        parameters_.config.rtp.flexfec.ssrc = flexfec_ssrc;
        parameters_.config.rtp.flexfec.protected_media_ssrcs = {primary_ssrc};
      }
    }
  }

  parameters_.config.rtp.c_name = sp.cname;
  if (rtp_extensions) {
    parameters_.config.rtp.extensions = *rtp_extensions;
    rtp_parameters_.header_extensions = *rtp_extensions;
  }
  parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size
                                         ? webrtc::RtcpMode::kReducedSize
                                         : webrtc::RtcpMode::kCompound;
  parameters_.config.rtp.mid = send_params.mid;
  rtp_parameters_.rtcp.reduced_size = send_params.rtcp.reduced_size;

  if (codec_settings) {
    SetCodec(*codec_settings);
  }
}

接收 pipeline ( socket ---> jitterbuffer ---> decoder ---> render )

// One incoming video stream. Registers itself as the renderer, configures the
// receive codecs and FlexFEC, then (re)creates the underlying WebRTC streams.
WebRtcVideoChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
    WebRtcVideoChannel* channel,
    webrtc::Call* call,
    const StreamParams& sp,
    webrtc::VideoReceiveStream::Config config,
    webrtc::VideoDecoderFactory* decoder_factory,
    bool default_stream,
    const std::vector<VideoCodecSettings>& recv_codecs,
    const webrtc::FlexfecReceiveStream::Config& flexfec_config)
    : channel_(channel),
      call_(call),
      stream_params_(sp),
      stream_(NULL),
      default_stream_(default_stream),
      config_(std::move(config)),
      flexfec_config_(flexfec_config),
      flexfec_stream_(nullptr),
      decoder_factory_(decoder_factory),
      sink_(NULL),
      first_frame_timestamp_(-1),
      estimated_remote_start_ntp_time_ms_(0) {
  // This object receives decoded frames directly (renderer callback).
  config_.renderer = this;
  ConfigureCodecs(recv_codecs);
  ConfigureFlexfecCodec(flexfec_config.payload_type);
  MaybeRecreateWebRtcFlexfecStream();
  RecreateWebRtcVideoStream();
}

有關 WebRTC 的更多知識可以參考我的其它的博客

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章