前言
不知道怎麼配置和使用 javacv 的,請看上一篇文章——《使用javacv中的ffmpeg實現錄屏》。上篇的代碼連運行都會失敗,現在終於解決了。
這篇博客是上篇博客代碼的改進,因爲上篇博客的代碼有很多bug。
思路
開啓兩個週期都爲 1 幀時長的定時線程:一個通過 Robot 類抓取屏幕,另一個通過 Java Sound API 抓取音頻,然後使用 ffmpeg 把兩路數據記錄到 mp4 文件中。
代碼實現:
package com;
import java.awt.AWTException;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Scanner;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameRecorder.Exception;
import org.bytedeco.javacv.Java2DFrameConverter;
public class VideoRecord {
    /** Scheduled executor driving the screen-capture loop (one task per frame). */
    private ScheduledThreadPoolExecutor screenTimer;
    /** Region of the screen to capture: the full primary display. */
    private final Rectangle rectangle = new Rectangle(Constant.WIDTH, Constant.HEIGHT);
    /** JavaCV recorder that muxes the video and audio streams into the output MP4. */
    private FFmpegFrameRecorder recorder;
    private Robot robot;
    /** Scheduled executor driving the audio-capture loop. */
    private ScheduledThreadPoolExecutor exec;
    private TargetDataLine line;
    private AudioFormat audioFormat;
    private DataLine.Info dataLineInfo;
    /** Whether an audio input device should be captured alongside the screen. */
    private boolean isHaveDevice = true;
    /** Wall-clock time (ms) when recording first started; 0 until then. */
    private long startTime = 0;
    /** Timestamp handed to the recorder for the current frame, in MICROseconds. */
    private long videoTS = 0;
    /** Wall-clock time (ms) when the current pause began; 0 while not paused. */
    private long pauseTimeStart = 0;
    /** Accumulated paused duration (ms) across ALL pause/resume cycles. */
    private long pauseTime = 0;
    private double frameRate = 5;
    /** Recording state: "start" = recording, "pause" = paused, "stop" = stopped. */
    private String state = "start";

    public String getState() {
        return state;
    }

    /**
     * Prepares (and immediately starts) an FFmpeg recorder writing to
     * {@code fileName + ".mp4"}, and — when {@code isHaveDevice} is true —
     * acquires a {@link TargetDataLine} for microphone capture.
     *
     * @param fileName     output path WITHOUT the ".mp4" extension
     * @param isHaveDevice true if an audio input device is present and should be recorded
     */
    public VideoRecord(String fileName, boolean isHaveDevice) {
        recorder = new FFmpegFrameRecorder(fileName + ".mp4", Constant.WIDTH, Constant.HEIGHT);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4); // 13
        recorder.setFormat("mp4");
        recorder.setSampleRate(44100);
        recorder.setFrameRate(frameRate);
        recorder.setVideoQuality(0);
        recorder.setVideoOption("crf", "23");
        // 1 Mb/s — a reasonable bitrate for 720P-class video.
        recorder.setVideoBitrate(1000000);
        /*
         * "preset" trades quality against encode speed. Values, fastest to slowest:
         * ultrafast, superfast, veryfast, faster, fast, medium, slow, slower, veryslow.
         * ultrafast gives the least compression (low encoder CPU, large streams);
         * veryslow gives the best compression at high CPU cost.
         * Reference: https://trac.ffmpeg.org/wiki/Encode/H.264
         */
        recorder.setVideoOption("preset", "slow");
        recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P); // yuv420p
        recorder.setAudioChannels(2);
        recorder.setAudioOption("crf", "0");
        // Highest audio quality.
        recorder.setAudioQuality(0);
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        try {
            robot = new Robot();
        } catch (AWTException e) {
            e.printStackTrace();
        }
        try {
            recorder.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (isHaveDevice) {
            /*
             * AudioFormat arguments:
             *   sampleRate        - samples per second
             *   sampleSizeInBits  - bits per sample
             *   channels          - 1 = mono, 2 = stereo
             *   signed            - signed samples
             *   bigEndian         - byte order of each sample (false = little-endian)
             */
            audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
            dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
            try {
                line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
            } catch (LineUnavailableException e1) {
                // line stays null; SoundCaputre() guards against this.
                System.out.println("未獲得音頻線路,"+e1);
            }
        }
        this.isHaveDevice = isHaveDevice;
    }

    /**
     * Starts (or resumes) recording: spawns the audio thread when a device is
     * present, then begins the scheduled screen-capture loop.
     */
    public void start() {
        state = "start";
        if (startTime == 0) {
            startTime = System.currentTimeMillis();
        }
        if (pauseTimeStart != 0) {
            // FIX: ACCUMULATE the paused duration. The original assigned here,
            // discarding earlier pauses and corrupting timestamps after the
            // second pause/resume cycle.
            pauseTime += System.currentTimeMillis() - pauseTimeStart;
            pauseTimeStart = 0;
        }
        // Launch the audio-capture thread when a recording device is available.
        if (isHaveDevice) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    SoundCaputre();
                }
            }).start();
        }
        // Screen capture.
        screenCaptrue();
    }

    /**
     * Starts the screen-capture loop: every 1/frameRate seconds, grab the
     * screen with Robot, redraw it into a BGR image, and hand the frame to the
     * recorder stamped with elapsed recording time (pauses excluded).
     */
    private void screenCaptrue() {
        screenTimer = new ScheduledThreadPoolExecutor(1);
        // FIX: one converter reused for every frame instead of allocating a
        // new Java2DFrameConverter per capture.
        final Java2DFrameConverter java2dConverter = new Java2DFrameConverter();
        /*
         * scheduleAtFixedRate arguments:
         *   command      - the task to run
         *   initialDelay - delay before the first run: one frame time (1s / frameRate)
         *   period       - interval between runs: one frame time
         *   unit         - TimeUnit.MILLISECONDS
         */
        screenTimer.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                BufferedImage screenCapture = robot.createScreenCapture(rectangle); // grab the screen
                // Redraw the capture into a 3-byte BGR image (the layout the recorder expects).
                BufferedImage videoImg = new BufferedImage(Constant.WIDTH, Constant.HEIGHT,
                        BufferedImage.TYPE_3BYTE_BGR);
                Graphics2D videoGraphics = videoImg.createGraphics();
                // Favor speed over quality for real-time capture.
                videoGraphics.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_DISABLE);
                videoGraphics.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING,
                        RenderingHints.VALUE_COLOR_RENDER_SPEED);
                videoGraphics.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
                videoGraphics.drawImage(screenCapture, 0, 0, null);
                Frame frame = java2dConverter.convert(videoImg);
                try {
                    // Elapsed recording time in microseconds, excluding paused spans.
                    videoTS = 1000L * (System.currentTimeMillis() - startTime - pauseTime);
                    // Only move the recorder clock forward.
                    if (videoTS > recorder.getTimestamp()) {
                        recorder.setTimestamp(videoTS);
                    }
                    recorder.record(frame); // encode the video frame
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // Release per-frame resources.
                videoGraphics.dispose();
                videoImg.flush();
                screenCapture.flush();
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }

    /**
     * Starts the audio-capture loop: every 1/frameRate seconds, drain the
     * TargetDataLine, convert the little-endian 16-bit PCM bytes to shorts,
     * and hand the samples to the recorder.
     */
    public void SoundCaputre() {
        if (line == null) {
            // FIX: AudioSystem.getLine() may have failed in the constructor;
            // the original dereferenced line here and threw NPE.
            return;
        }
        try {
            // FIX: open() on an already-open line throws IllegalStateException
            // (hit on resume after pause), so test isOpen() and isRunning()
            // separately instead of the original single isRunning() check.
            if (!line.isOpen()) {
                line.open(audioFormat);
            }
            if (!line.isRunning()) {
                line.start();
            }
        } catch (LineUnavailableException e1) {
            e1.printStackTrace();
            return;
        }
        final int sampleRate = (int) audioFormat.getSampleRate();
        final int numChannels = audioFormat.getChannels();
        // One second of samples (per channel count) — ample for a 1/frameRate tick.
        int audioBufferSize = sampleRate * numChannels;
        final byte[] audioBytes = new byte[audioBufferSize];
        exec = new ScheduledThreadPoolExecutor(1);
        exec.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    // FIX: cap the read at the buffer size — line.available()
                    // can exceed audioBytes.length, and the original call then
                    // threw ArrayIndexOutOfBoundsException inside read().
                    int toRead = Math.min(line.available(), audioBytes.length);
                    int nBytesRead = line.read(audioBytes, 0, toRead);
                    if (nBytesRead <= 0) {
                        return; // nothing captured this tick
                    }
                    // 16-bit samples: 2 bytes each.
                    int nSamplesRead = nBytesRead / 2;
                    short[] samples = new short[nSamplesRead];
                    // Wrap the PCM bytes (little-endian, matching audioFormat)
                    // and view them as shorts for recordSamples().
                    ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                    ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
                    recorder.recordSamples(sampleRate, numChannels, sBuff);
                } catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        }, (int) (1000 / frameRate), (int) (1000 / frameRate), TimeUnit.MILLISECONDS);
    }

    /**
     * Pauses recording: stops both capture loops and remembers when the pause
     * began so start() can exclude the paused span from timestamps.
     */
    public void pause() {
        state = "pause";
        if (null != screenTimer) { // FIX: null guard, consistent with stop()
            screenTimer.shutdownNow();
            screenTimer = null;
        }
        if (isHaveDevice) {
            if (null != exec) {
                exec.shutdownNow();
                exec = null;
            }
            if (null != line) {
                // FIX: stop and flush the line so audio buffered WHILE paused
                // is not recorded when capture resumes.
                line.stop();
                line.flush();
            }
        }
        pauseTimeStart = System.currentTimeMillis();
    }

    /**
     * Stops recording for good: shuts down both capture loops, closes the
     * audio line, and finalizes the MP4 file.
     */
    public void stop() {
        state = "stop";
        if (null != screenTimer) {
            screenTimer.shutdownNow();
        }
        try {
            if (isHaveDevice) {
                if (null != exec) {
                    exec.shutdownNow();
                }
                if (null != line) {
                    line.stop();
                    line.close();
                }
                dataLineInfo = null;
                audioFormat = null;
            }
            // stop() flushes and writes the trailer; release()/close() free
            // the native resources.
            recorder.stop();
            recorder.release();
            recorder.close();
            screenTimer = null;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Console driver: records immediately, then accepts "start" / "pause" /
     * "stop" commands from stdin.
     */
    public static void main(String[] args) {
        VideoRecord videoRecord = new VideoRecord("C:\\Users\\Administrator\\Desktop\\視頻2", true);
        videoRecord.start();
        System.out.println("****start繼續錄製,pause暫停錄製,stop停止錄製****");
        // FIX: create the Scanner once instead of once per loop iteration
        // (repeated Scanners over System.in can lose buffered input).
        Scanner sc = new Scanner(System.in);
        while (true) {
            if (sc.hasNext()) {
                String cmd = sc.next();
                if (cmd.equalsIgnoreCase("stop")) {
                    videoRecord.stop();
                    System.out.println("****已經停止錄製****");
                    break;
                }
                if (cmd.equalsIgnoreCase("pause")) {
                    if (videoRecord.getState().equals("pause")) {
                        System.out.println("*error:已經暫停,請勿重複操作pause*");
                        continue;
                    }
                    videoRecord.pause();
                    System.out.println("****已暫停,start繼續錄製,stop結束錄製****");
                }
                if (cmd.equalsIgnoreCase("start")) {
                    if (videoRecord.getState().equals("start")) {
                        System.out.println("*error:請勿重複操作start*");
                        continue;
                    }
                    videoRecord.start();
                    System.out.println("****正在錄製****");
                }
            }
        }
    }
}
/** Screen dimensions used for the capture area and the output video size. */
class Constant {
    /** Capture width: the primary display's width in pixels. */
    public final static int WIDTH;
    /** Capture height: the primary display's height in pixels. */
    public final static int HEIGHT;

    static {
        // Query the screen size once and split it into the two constants.
        java.awt.Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();
        WIDTH = screen.width;
        HEIGHT = screen.height;
    }
}