屏幕錄製的命令行形式可以參考以下鏈接
https://blog.csdn.net/leixiaohua1020/article/details/38284961
我們所做的示例程序,說白了,就是通過API程序代碼來實現命令行的操作指令。
命令行形式
設備列表
ffmpeg -list_devices true -f dshow -i dummy
查看設備選項
ffmpeg -list_options true -f dshow -i video="Integrated Camera"
播放攝像頭數據
ffplay -s 1280x720 -f dshow -i video="Integrated Camera"
設置分辨率爲 1280x720
屏幕錄製
Linux版本
Linux下可以使用x11grab
ffmpeg -f x11grab -s 1600x900 -r 50 -vcodec libx264 -preset:v ultrafast -tune:v zerolatency -crf 18 -f mpegts udp://localhost:1234
Windows版本
Windows下屏幕錄製的設備:gdigrab
gdigrab是基於GDI的抓屏設備,可以用於抓取屏幕的特定區域。在這裏記錄一下gdigrab的用法。
gdigrab通過設定不同的輸入URL,支持兩種方式的屏幕抓取:
(1)“desktop”:抓取整張桌面。或者抓取桌面中的一個特定的區域。
(2)“title={窗口名稱}”:抓取屏幕中特定的一個窗口。
最簡單的抓屏:
ffmpeg -f gdigrab -i desktop out.mpg
從屏幕的(10,20)點處開始,抓取640x480的屏幕,設定幀率爲5
ffmpeg -f gdigrab -framerate 5 -offset_x 10 -offset_y 20 -video_size 640x480 -i desktop out.mpg
程序源碼
/**
* 最簡單的基於FFmpeg的AVDevice例子(屏幕錄製)
* Simplest FFmpeg Device (Screen Capture)
*
* 歸根結底,我們就是爲了實現以下屏幕錄製的命令:
* ffmpeg -f gdigrab -i desktop out.mpg
*
* 本程序實現了屏幕錄製功能。可以錄製並播放桌面數據。是基於FFmpeg
* 的libavdevice類庫最簡單的例子。通過該例子,可以學習FFmpeg中
* libavdevice類庫的使用方法。
* 本程序在Windows下可以使用2種方式錄製屏幕:
* 1.gdigrab: Win32下的基於GDI的屏幕錄製設備。
* 抓取桌面的時候,輸入URL爲“desktop”。
* 2.dshow: 使用Directshow。注意需要安裝額外的軟件screen-capture-recorder
* 在Linux下可以使用x11grab錄製屏幕。
* 在MacOS下可以使用avfoundation錄製屏幕。
*/
#include <stdio.h>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libavutil/dict.h"
#include "SDL2/SDL.h"
}
#define OUTPUT_YUV420P 1
#define OUTPUT_H264 1
int main(int argc, char *argv[])
{
AVFormatContext *pFormatCtx;
AVStream *videoStream;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameYUV;
AVPacket *pPacket;
SwsContext *pImgConvertCtx;
int videoIndex = -1;
unsigned int i = 0;
SDL_Window *screen;
SDL_Renderer *sdlRenderer;
SDL_Texture *sdlTexture;
SDL_Rect sdlRect;
int screen_w = 0;
int screen_h = 0;
printf("Starting...\n");
//register device
avdevice_register_all();
pFormatCtx = avformat_alloc_context();
//use gdigrab
AVInputFormat *ifmt = av_find_input_format("gdigrab");
if (!ifmt)
{
printf("can't find input device.\n");
return -1;
}
AVDictionary *options = NULL;
if (avformat_open_input(&pFormatCtx, "desktop", ifmt, &options) != 0)
{
printf("can't open input stream.\n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
printf("can't find stream information.\n");
return -1;
}
for (i = 0; i < pFormatCtx->nb_streams; i++)
{
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoIndex = i;
break;
}
}
if (videoIndex == -1)
{
printf("can't find a video stream.\n");
return -1;
}
videoStream = pFormatCtx->streams[videoIndex];
pCodec = avcodec_find_decoder(videoStream->codecpar->codec_id);
if (pCodec == NULL)
{
printf("codec not found.\n");
return -1;
}
pCodecCtx = avcodec_alloc_context3(pCodec);
if (!pCodecCtx)
{
printf("can't alloc codec context.\n");
return -1;
}
avcodec_parameters_to_context(pCodecCtx, videoStream->codecpar);
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
printf("can't open codec.\n");
return -1;
}
pFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
pPacket = (AVPacket*) av_malloc(sizeof(AVPacket));
unsigned char *outBuffer = (unsigned char*) av_malloc(
av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
pCodecCtx->height, 1));
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, outBuffer,
AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
pImgConvertCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
#if OUTPUT_YUV420P
FILE *fpYUV = fopen("output.yuv", "wb+");
#endif
#if OUTPUT_H264
AVCodecContext *pH264CodecCtx;
AVCodec *pH264Codec;
FILE *fpH264 = fopen("output.h264", "wb+");
//查找H264編碼器
pH264Codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!pH264Codec)
{
printf("can't find h264 codec.\n");
return -1;
}
pH264CodecCtx = avcodec_alloc_context3(pH264Codec);
pH264CodecCtx->codec_id = AV_CODEC_ID_H264;
pH264CodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
pH264CodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
pH264CodecCtx->width = pCodecCtx->width;
pH264CodecCtx->height = pCodecCtx->height;
pH264CodecCtx->time_base.num = 1;
pH264CodecCtx->time_base.den = 15; //幀率(即一秒鐘多少張圖片)
pH264CodecCtx->bit_rate = 800000; //比特率(調節這個大小可以改變編碼後視頻的質量)
pH264CodecCtx->gop_size = 12;
pH264CodecCtx->qmin = 10;
pH264CodecCtx->qmax = 51;
//some formats want stream headers to be separate
if (pH264CodecCtx->flags & AVFMT_GLOBALHEADER)
{
pH264CodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
// set option
AVDictionary *params = NULL;
//H.264
av_dict_set(¶ms, "preset", "superfast", 0);
av_dict_set(¶ms, "tune", "zerolatency", 0); //實現實時編碼
if (avcodec_open2(pH264CodecCtx, pH264Codec, ¶ms) < 0)
{
printf("can't open video encoder.\n");
return -1;
}
#endif
//SDL handle
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
{
printf("can't initialize SDL - %s\n", SDL_GetError());
return -1;
}
screen_w = pCodecCtx->width;
screen_h = pCodecCtx->height;
screen = SDL_CreateWindow("Simplest ffmpeg device(screen capture)",
SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h,
SDL_WINDOW_OPENGL);
if (!screen)
{
printf("SDL: can't create window - exiting: %s\n", SDL_GetError());
return -1;
}
sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
while (av_read_frame(pFormatCtx, pPacket) >= 0)
{
if (pPacket->stream_index == videoIndex)
{
int ret = avcodec_send_packet(pCodecCtx, pPacket);
if (ret < 0)
{
printf("Decode error.\n");
return -1;
}
if (avcodec_receive_frame(pCodecCtx, pFrame) >= 0)
{
sws_scale(pImgConvertCtx,
(const unsigned char* const*) pFrame->data,
pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
pFrameYUV->linesize);
#if OUTPUT_YUV420P
int y_size = pCodecCtx->width * pCodecCtx->height;
fwrite(pFrameYUV->data[0], 1, y_size, fpYUV); //Y
fwrite(pFrameYUV->data[1], 1, y_size / 4, fpYUV); //U
fwrite(pFrameYUV->data[2], 1, y_size / 4, fpYUV); //V
#endif
#if OUTPUT_H264
pFrameYUV->format = pCodecCtx->pix_fmt;
pFrameYUV->width = pCodecCtx->width;
pFrameYUV->height = pCodecCtx->height;
int ret = avcodec_send_frame(pH264CodecCtx, pFrameYUV);
if (ret < 0)
{
printf("failed to encode.\n");
return -1;
}
if (avcodec_receive_packet(pH264CodecCtx, pPacket) >= 0)
{
ret = fwrite(pPacket->data, 1, pPacket->size, fpH264);
if (ret < 0)
{
printf("write into output.h264 failed.\n");
}
}
#endif
SDL_UpdateTexture(sdlTexture, &sdlRect, pFrameYUV->data[0],
pFrameYUV->linesize[0]);
SDL_RenderClear(sdlRenderer);
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent(sdlRenderer);
SDL_Delay(40);
}
}
av_packet_unref(pPacket);
}
sws_freeContext(pImgConvertCtx);
#if OUTPUT_YUV420P
fclose(fpYUV);
#endif
#if OUTPUT_H264
fclose(fpH264);
#endif
SDL_Quit();
av_free(outBuffer);
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avcodec_close(pH264CodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}