I previously covered how to set up an RTMP server with nginx on CentOS (link). This article explains how an iOS client can broadcast its screen over RTMP and play the stream back. The complete project is at https://github.com/zxm006/Rtmp_iOS; this article mainly walks through that project's modules, so download it if you need it. For questions, feel free to reach me on QQ: 592979271.
1. Camera video capture
On iOS, camera video is captured at the low level through AVFoundation. There are plenty of tutorials online, but here is a brief recap.
1) First enumerate the capture devices (AVCaptureDevice) to get the front or back camera device, then create an AVCaptureDeviceInput object, videoInput, from it.
2) Create an AVCaptureSession, set the resolution, and add videoInput to the session. Then configure an AVCaptureVideoDataOutput and so on; once the session is started, the captured data arrives in the captureOutput delegate method as CMSampleBufferRef objects. See the code below (and the CameraHelp class in the project).
```objectivec
-(void)startVideoCapture
{
NSLog(@"startVideoCapture");
// keep the screen from auto-locking while capturing
[[UIApplication sharedApplication] setIdleTimerDisabled:YES];
if(_mCaptureDevice || _mCaptureSession)
{
NSLog(@"Already capturing");
return;
}
if((_mCaptureDevice = [CameraHelp cameraAtPosition: AVCaptureDevicePositionFront]) == nil)
{
NSLog(@"Failed to get valide capture device");
return;
}
NSError *error = nil;
_videoInput = [AVCaptureDeviceInput deviceInputWithDevice:_mCaptureDevice error:&error];
if (!_videoInput)
{
NSLog(@"Failed to get video input");
self.mCaptureDevice = nil;
return;
}
_mCaptureSession = [[AVCaptureSession alloc] init];
if( _mresType ==0){
_mCaptureSession.sessionPreset = AVCaptureSessionPreset352x288;
}
else if( _mresType ==1){
_mCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;
}
else if( _mresType ==2){
_mCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;
}
else {
_mCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;
}
[_mCaptureSession addInput:_videoInput];
AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
avCaptureVideoDataOutput.videoSettings = [[[NSDictionary alloc] initWithObjectsAndKeys:
    [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], kCVPixelBufferPixelFormatTypeKey, nil] autorelease];
avCaptureVideoDataOutput.alwaysDiscardsLateVideoFrames = YES;
[_mCaptureSession beginConfiguration];
[_mCaptureDevice lockForConfiguration:&error];
// Frame duration is the inverse of frame rate: a 1/18 s minimum duration caps
// capture at 18 fps, and a 1/12 s maximum duration keeps it at or above 12 fps
// (the minimum duration must not exceed the maximum).
[_mCaptureDevice setActiveVideoMinFrameDuration:CMTimeMake(1, 18)];
[_mCaptureDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, 12)];
[_mCaptureDevice unlockForConfiguration];
[_mCaptureSession commitConfiguration];
dispatch_queue_t queue = dispatch_queue_create("videoSession--ouput", NULL);
[avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
[_mCaptureSession addOutput:avCaptureVideoDataOutput];
dispatch_release(queue);
mStarted = YES;
doing=NO;
AVCaptureConnection *videoConnection = [avCaptureVideoDataOutput connectionWithMediaType:AVMediaTypeVideo];
// SET THE ORIENTATION HERE -------------------------------------------------
[videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
if(![_mCaptureSession isRunning]){
[_mCaptureSession startRunning];
}
[self startPreview];
}
```
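The captured frames then arrive on the delegate queue. A minimal sketch of the delegate method (CameraHelp implements the real version; the encodeFrame: hand-off here is a hypothetical placeholder):

```objectivec
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Each callback delivers one captured frame as a CVPixelBuffer in NV12
    // (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, as configured above).
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }
    // Hand the frame to the H.264 encoder (hypothetical method name).
    [self.encoder encodeFrame:sampleBuffer];
}
```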
2. Screen recording
Since iOS 10, Apple has offered ReplayKit for screen recording; it can record to a file or deliver the raw stream data, and it has become more convenient with each release. This section first covers the screenshot-based approach: roughly, capture the screen 30 times per second. The main code is below (see the CapScreen class in the project), with a sketch of the 30 fps driver after the block.
```objectivec
- (UIImage *)capWindow:(UIWindow *)window
{
    CGSize capsize = window.bounds.size;
    // When the target resolution is landscape but the window bounds are portrait,
    // swap the dimensions so the capture matches the output orientation.
    if (m_uiWidth > m_uiHeight && capsize.width < capsize.height)
    {
        CGFloat width = capsize.width;
        capsize.width = capsize.height;
        capsize.height = width;
    }
    UIGraphicsBeginImageContextWithOptions(capsize, YES, 0);
    // Render the window's view hierarchy into the current context (iOS 7+).
    [window drawViewHierarchyInRect:window.bounds afterScreenUpdates:NO];
    UIImage *screenshot = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return screenshot;
}
```
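Driving capWindow: at roughly 30 captures per second can be done with a CADisplayLink; a minimal sketch, assuming an onDisplayLink: handler of this shape (the project's CapScreen class may schedule it differently):

```objectivec
- (void)startScreenCapture
{
    // CADisplayLink fires with the display refresh; divide 60 Hz down to ~30 fps.
    CADisplayLink *link = [CADisplayLink displayLinkWithTarget:self
                                                      selector:@selector(onDisplayLink:)];
    link.frameInterval = 2;  // 60 / 2 = 30 callbacks per second (pre-iOS 10 API)
    [link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}

- (void)onDisplayLink:(CADisplayLink *)link
{
    UIImage *shot = [self capWindow:[UIApplication sharedApplication].keyWindow];
    // ... convert the UIImage to a pixel buffer and feed it to the encoder ...
}
```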
3. H.264 hardware video encoding
First, create a VTCompressionSessionRef and configure it (output callback, resolution, GOP size, bitrate, and so on); see the H264HwEncoderImpl class in the project.
```objectivec
// Create the compression session
OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)(self), &EncodingSession);
NSLog(@"H264: VTCompressionSessionCreate %d", (int)status);
if (status != 0)
{
NSLog(@"H264: Unable to create a H264 session");
_error = @"H264: Unable to create a H264 session";
return ;
}
// Real-time encoding output, to reduce encoder latency
status = VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
NSLog(@"set realtime return: %d", (int)status);
// H.264 profile. Live streaming commonly uses Baseline to avoid B-frame latency;
// here High (auto level) is used and B-frames are disabled separately below.
status = VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_High_AutoLevel);
NSLog(@"set profile return: %d", (int)status);
// Average bitrate. Without this the encoder defaults to a very low bitrate
// and the output looks blurry.
status = VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(bt)); // bps
status += VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)@[@(bt * 2 / 8), @1]); // bytes per second
NSLog(@"set bitrate return: %d", (int)status);
// Keyframe interval, i.e. the GOP size
status = VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)@(fps));
NSLog(@"set MaxKeyFrame return: %d", (int)status);
// Expected frame rate; only used to initialize the session, not the actual FPS
status = VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)@(fps));
NSLog(@"set framerate return: %d", (int)status);
// Disable frame reordering, i.e. no B-frames
VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
// Start encoding
status = VTCompressionSessionPrepareToEncodeFrames(EncodingSession);
NSLog(@"VTCompressionSessionPrepareToEncodeFrames return: %d", (int)status);
```
4. Audio capture and playback
Audio is captured with AudioQueue; iOS can output AAC directly from the capture queue (see the RecordAndSend class). First set AVAudioSession to the PlayAndRecord category, making sure the session gets activated first ([[AVAudioSession sharedInstance] setActive:YES error:nil];).
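A minimal sketch of that setup, assuming a HandleInputBuffer callback of your own (the project's RecordAndSend class wires this up in full; error checking is omitted here):

```objectivec
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>

// Hypothetical callback: receives filled buffers of AAC-encoded audio.
static void HandleInputBuffer(void *inUserData, AudioQueueRef inAQ,
                              AudioQueueBufferRef inBuffer,
                              const AudioTimeStamp *inStartTime,
                              UInt32 inNumPackets,
                              const AudioStreamPacketDescription *inPacketDesc)
{
    // ... send inBuffer->mAudioData (AAC) to the RTMP packetizer ...
    AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);  // re-queue for reuse
}

- (void)startAudioCapture
{
    // PlayAndRecord lets capture and playback coexist; activate before use.
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [[AVAudioSession sharedInstance] setActive:YES error:nil];

    // Ask the queue for AAC directly, so no separate software encoder is needed.
    AudioStreamBasicDescription fmt = {0};
    fmt.mFormatID = kAudioFormatMPEG4AAC;
    fmt.mSampleRate = 44100;
    fmt.mChannelsPerFrame = 1;

    AudioQueueRef queue;
    AudioQueueNewInput(&fmt, HandleInputBuffer, (__bridge void *)self, NULL, NULL, 0, &queue);
    for (int i = 0; i < 3; i++) {
        AudioQueueBufferRef buf;
        AudioQueueAllocateBuffer(queue, 4096, &buf);
        AudioQueueEnqueueBuffer(queue, buf, 0, NULL);
    }
    AudioQueueStart(queue, NULL);
}
```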
For playback, pushing data only when AudioQueue asks for it in each callback can add latency, so OpenAL is used instead (see the AudioPlay class).
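A minimal OpenAL playback sketch along those lines, assuming 16-bit mono PCM after decoding (the project's AudioPlay class manages buffers and sources more carefully):

```objectivec
#import <OpenAL/al.h>
#import <OpenAL/alc.h>

static ALCdevice *device; static ALCcontext *context; static ALuint source;

void openalInit(void)
{
    device = alcOpenDevice(NULL);
    context = alcCreateContext(device, NULL);
    alcMakeContextCurrent(context);
    alGenSources(1, &source);
}

// Queue one chunk of decoded PCM; OpenAL plays queued buffers back to back,
// which avoids the pull-style callback latency of AudioQueue output.
void openalPlay(const void *pcm, ALsizei size, ALsizei sampleRate)
{
    // Reclaim buffers that have finished playing.
    ALint processed = 0;
    alGetSourcei(source, AL_BUFFERS_PROCESSED, &processed);
    while (processed-- > 0) {
        ALuint done;
        alSourceUnqueueBuffers(source, 1, &done);
        alDeleteBuffers(1, &done);
    }
    ALuint buf;
    alGenBuffers(1, &buf);
    alBufferData(buf, AL_FORMAT_MONO16, pcm, size, sampleRate);
    alSourceQueueBuffers(source, 1, &buf);
    ALint state;
    alGetSourcei(source, AL_SOURCE_STATE, &state);
    if (state != AL_PLAYING) alSourcePlay(source);
}
```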
5. RTMP publishing and receiving
RTMP publishing and pulling use the open-source rtmpdump library, wrapped as the uuRtmpClient class (see the uuRtmpClient class in the project). It covers the various rtmpdump calls: connecting to the server, sending and receiving data, packaging Metadata, and so on; you can use it directly if you need it.
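For orientation, a bare-bones librtmp publish sequence looks roughly like this; the URL, body, and timestamp are placeholders, and uuRtmpClient wraps the same calls with reconnection, Metadata packaging, and error handling:

```objectivec
#include "librtmp/rtmp.h"
#include <string.h>

// Publish one pre-packaged FLV video tag body. Connection setup is shown inline
// for brevity; a real client connects once and reuses the handle.
static int publishVideo(char *url, const uint8_t *body, int bodySize, uint32_t timestamp)
{
    RTMP *rtmp = RTMP_Alloc();
    RTMP_Init(rtmp);
    RTMP_SetupURL(rtmp, url);
    RTMP_EnableWrite(rtmp);              // publish mode rather than play mode
    if (!RTMP_Connect(rtmp, NULL) || !RTMP_ConnectStream(rtmp, 0)) {
        RTMP_Free(rtmp);
        return -1;
    }
    RTMPPacket packet;
    RTMPPacket_Alloc(&packet, bodySize);
    memcpy(packet.m_body, body, bodySize);
    packet.m_packetType  = RTMP_PACKET_TYPE_VIDEO;
    packet.m_nBodySize   = bodySize;
    packet.m_nTimeStamp  = timestamp;
    packet.m_nChannel    = 0x04;         // channel commonly used for A/V data
    packet.m_headerType  = RTMP_PACKET_SIZE_LARGE;
    packet.m_nInfoField2 = rtmp->m_stream_id;
    int ok = RTMP_SendPacket(rtmp, &packet, 0);  // 0 = send without queueing
    RTMPPacket_Free(&packet);
    RTMP_Close(rtmp);
    RTMP_Free(rtmp);
    return ok ? 0 : -1;
}
```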
6. FFmpeg decoding
Build FFmpeg for iOS: save the script below, adjust it to your needs, and run it to get the iOS FFmpeg libraries, then import them into the project.
```sh
#!/bin/sh
# directories
SOURCE="ffmpeg-4.0"
FAT="FFmpeg-iOS"
SCRATCH="scratch"
# must be an absolute path
THIN=`pwd`/"thin"
CONFIGURE_FLAGS=" --disable-avdevice --disable-avfilter --disable-network --disable-programs --disable-ffmpeg --disable-debug --disable-ffplay --disable-iconv --disable-ffprobe --disable-encoders --disable-decoders \
--disable-filters --disable-swscale --disable-armv6 --disable-armv6t2 --disable-protocols \
--disable-muxers --disable-demuxers --disable-parsers --disable-bsfs \
--disable-sdl2 --disable-armv5te --disable-vfp --disable-swresample --disable-everything \
--enable-cross-compile --enable-pic --enable-small --enable-optimizations \
--enable-decoder=h264 \
--enable-nonfree --enable-gpl"
if [ "$X264" ]
then
CONFIGURE_FLAGS="$CONFIGURE_FLAGS --enable-gpl --enable-encoder=libx264 --enable-libx264"
fi
if [ "$FDK_AAC" ]
then
echo 'enable-libfdk-aac'
CONFIGURE_FLAGS="$CONFIGURE_FLAGS --enable-libfdk-aac"
fi
# avresample
#CONFIGURE_FLAGS="$CONFIGURE_FLAGS --enable-avresample"
#x86_64
ARCHS="arm64 armv7 x86_64"
COMPILE="y"
LIPO="y"
DEPLOYMENT_TARGET="8.0"
if [ "$*" ]
then
if [ "$*" = "lipo" ]
then
# skip compile
COMPILE=
else
ARCHS="$*"
if [ $# -eq 1 ]
then
# skip lipo
LIPO=
fi
fi
fi
if [ "$COMPILE" ]
then
if [ ! `which yasm` ]
then
echo 'Yasm not found'
if [ ! `which brew` ]
then
echo 'Homebrew not found. Trying to install...'
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" \
|| exit 1
fi
echo 'Trying to install Yasm...'
brew install yasm || exit 1
fi
if [ ! `which gas-preprocessor.pl` ]
then
echo 'gas-preprocessor.pl not found. Trying to install...'
(curl -L https://github.com/libav/gas-preprocessor/raw/master/gas-preprocessor.pl \
-o /usr/local/bin/gas-preprocessor.pl \
&& chmod +x /usr/local/bin/gas-preprocessor.pl) \
|| exit 1
fi
if [ ! -r $SOURCE ]
then
echo 'FFmpeg source not found. Trying to download...'
curl http://www.ffmpeg.org/releases/$SOURCE.tar.bz2 | tar xj \
|| exit 1
fi
CWD=`pwd`
for ARCH in $ARCHS
do
echo "building $ARCH..."
mkdir -p "$SCRATCH/$ARCH"
cd "$SCRATCH/$ARCH"
CFLAGS="-arch $ARCH"
if [ "$ARCH" = "i386" -o "$ARCH" = "x86_64" ]
then
PLATFORM="iPhoneSimulator"
CFLAGS="$CFLAGS -mios-simulator-version-min=$DEPLOYMENT_TARGET"
else
PLATFORM="iPhoneOS"
CFLAGS="$CFLAGS -mios-version-min=$DEPLOYMENT_TARGET "
if [ "$ARCH" = "arm64" ]
then
EXPORT="GASPP_FIX_XCODE5=1"
fi
fi
XCRUN_SDK=`echo $PLATFORM | tr '[:upper:]' '[:lower:]'`
CC="xcrun -sdk $XCRUN_SDK clang"
CXXFLAGS="$CFLAGS"
LDFLAGS="$CFLAGS"
if [ "$X264" ]
then
CFLAGS="$CFLAGS -I$X264/include"
LDFLAGS="$LDFLAGS -L$X264/lib"
fi
if [ "$FDK_AAC" ]
then
echo 'enable-libfdk-aac -- lib'
CFLAGS="$CFLAGS -I$FDK_AAC/include"
LDFLAGS="$LDFLAGS -L$FDK_AAC/lib"
fi
TMPDIR=${TMPDIR/%\/} $CWD/$SOURCE/configure \
--target-os=darwin \
--arch=$ARCH \
--cc="$CC" \
$CONFIGURE_FLAGS \
--extra-cflags="$CFLAGS" \
--extra-ldflags="$LDFLAGS" \
--prefix="$THIN/$ARCH" \
|| exit 1
make -j3 install $EXPORT || exit 1
cd $CWD
done
fi
if [ "$LIPO" ]
then
echo "building fat binaries..."
mkdir -p $FAT/lib
set - $ARCHS
CWD=`pwd`
cd $THIN/$1/lib
for LIB in *.a
do
cd $CWD
echo lipo -create `find $THIN -name $LIB` -output $FAT/lib/$LIB 1>&2
lipo -create `find $THIN -name $LIB` -output $FAT/lib/$LIB || exit 1
done
cd $CWD
cp -rf $THIN/$1/include $FAT
fi
echo Done
```
For how FFmpeg is used, see the liveFFmpegdecode class in the project. The main flow is as follows.
First initialize the library and the decoder:
```objectivec
av_register_all();
AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!codec) {
    return -1;
}
m_CodecContext = avcodec_alloc_context3(codec);
if (!m_CodecContext) {
    return -1;
}
// Allow feeding truncated bitstreams (frames split across packets).
// FFmpeg 4.x uses the AV_-prefixed names; the old CODEC_* macros were removed.
if (codec->capabilities & AV_CODEC_CAP_TRUNCATED)
    m_CodecContext->flags |= AV_CODEC_FLAG_TRUNCATED;
if (avcodec_open2(m_CodecContext, codec, NULL) < 0) {
    avcodec_close(m_CodecContext);
    avcodec_free_context(&m_CodecContext);
    return -1;
}
```
Then decode; the flow is roughly as follows:
```objectivec
AVFrame *pictureFrame = av_frame_alloc();
int got_picture = 0;
// With the send/receive API, a return of 0 from avcodec_receive_frame
// means a decoded frame came out.
if (avcodec_send_packet(m_CodecContext, &avpkt) == 0)
{
    got_picture = avcodec_receive_frame(m_CodecContext, pictureFrame);
}
else
{
    if (pictureFrame) {
        av_frame_free(&pictureFrame);
        pictureFrame = NULL;
    }
    av_packet_unref(&avpkt);
    return 0;
}
if (got_picture == 0)
{
    // ........... (hand the decoded frame on for display)
}
```
The decoded data is in YUV image format.
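One detail worth noting: each AVFrame plane has its own stride (linesize), usually wider than the visible picture, so rows must be copied individually. A sketch, assuming the decoder's usual yuv420p output:

```objectivec
#include <string.h>
#include <libavutil/frame.h>

// Copy one plane of a decoded frame into a tightly packed buffer.
// planeIndex: 0 = Y (full size), 1 = U, 2 = V (half width/height for yuv420p).
static void copyPlane(const AVFrame *frame, int planeIndex,
                      uint8_t *dst, int width, int height)
{
    const uint8_t *src = frame->data[planeIndex];
    for (int row = 0; row < height; row++) {
        memcpy(dst + row * width, src + row * frame->linesize[planeIndex], width);
    }
}
```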
7. Displaying YUV data
Converting YUV to RGB and then displaying it via UIImage is CPU-heavy; it is better to draw the YUV directly with OpenGL, with no conversion needed. See the iOSGLView class in the project. It is about as simple to use as an ordinary view; those familiar with OpenGL can dig into the implementation details, and everyone else can just use it as-is.
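The core of the OpenGL approach is a fragment shader that samples the Y, U, and V planes as three textures and converts to RGB on the GPU. A sketch of such a shader with BT.601 full-range coefficients (the uniform names are illustrative, not necessarily those used by iOSGLView):

```objectivec
// OpenGL ES 2.0 fragment shader: per-pixel YUV -> RGB on the GPU.
static NSString *const kYUVFragmentShader = @""
"varying highp vec2 v_texcoord;                                  \n"
"uniform sampler2D s_textureY;                                   \n"
"uniform sampler2D s_textureU;                                   \n"
"uniform sampler2D s_textureV;                                   \n"
"void main()                                                     \n"
"{                                                               \n"
"    highp float y = texture2D(s_textureY, v_texcoord).r;        \n"
"    highp float u = texture2D(s_textureU, v_texcoord).r - 0.5;  \n"
"    highp float v = texture2D(s_textureV, v_texcoord).r - 0.5;  \n"
"    highp float r = y + 1.402 * v;                              \n"
"    highp float g = y - 0.344 * u - 0.714 * v;                  \n"
"    highp float b = y + 1.772 * u;                              \n"
"    gl_FragColor = vec4(r, g, b, 1.0);                          \n"
"}                                                               \n";
```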
8. ReplayKit screen recording
See the MovieClipHandler and SampleHandler files in the project; SampleHandler is the class that receives the data. You need to create a broadcast extension and so on; see https://www.jianshu.com/p/401b5b632d5b for the details. For uploading a live stream after ReplayKit recording, you can use an upload library I wrote (download link); it wraps the hardware encoding and upload of the audio/video data that ReplayKit outputs and can be imported directly into a project. The video format is H.264 and the audio format is AAC.
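Inside the extension, ReplayKit delivers everything through a single callback. A minimal SampleHandler sketch (the Uploader calls are hypothetical placeholders for the upload library's entry points):

```objectivec
#import <ReplayKit/ReplayKit.h>

@interface SampleHandler : RPBroadcastSampleHandler
@end

@implementation SampleHandler

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
                   withType:(RPSampleBufferType)sampleBufferType
{
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Screen frames: hand to the hardware H.264 encoder + RTMP uploader
            // (hypothetical entry point).
            [Uploader pushVideoSample:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
        case RPSampleBufferTypeAudioMic:
            // App/mic audio: encode to AAC and upload (hypothetical entry point).
            [Uploader pushAudioSample:sampleBuffer type:sampleBufferType];
            break;
    }
}

@end
```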
That is an overview of RTMP live publishing and viewing on iOS. For the details, download the project and take a look; for questions, reach me on QQ: 592979271.