前言
預覽本地視頻的時候加上水印和美白效果
// Preview view that plays a local video through OpenGL so a watermark and a
// beauty (skin-whitening) filter can be applied before display.
// NOTE(review): init() calls setRenderer(this), so this class presumably also
// implements GLSurfaceView.Renderer — the implements clause is not visible in
// this excerpt; confirm against the full source.
public class VideoPreviewView extends GLSurfaceView{
}
然後在初始化函數中,設置進行OpenGL初始化 private void init(Context context) {
// One-time OpenGL and player setup for the preview view.
setEGLContextClientVersion(2); // request an OpenGL ES 2.0 context
setRenderer(this); // this view acts as its own Renderer
setRenderMode(RENDERMODE_WHEN_DIRTY); // only redraw when requestRender() is called
setPreserveEGLContextOnPause(false); // GL context is torn down on pause; GL objects rebuilt on resume
setCameraDistance(100); // NOTE(review): View.setCameraDistance affects 2D rotation projection only — purpose here unclear, confirm it is needed
mDrawer = new VideoDrawer(context,getResources());
// Create the GL drawer and the media-player wrapper
mMediaPlayer = new MediaPlayerWrapper();
mMediaPlayer.setOnCompletionListener(this);
}
上面代碼中的VideoDrawer和MediaPlayerWrapper就是控制OpenGL繪製和視頻播放的重點類,其實VideoPreviewView類和我們之前的CameraView類是完全類似的,只不過一個是從攝像頭獲取數據,一個是從視頻解碼器獲取數據而已。下面我們來分別說說MediaPlayerWrapper和VideoDrawer類。 public class VideoDrawer implements GLSurfaceView.Renderer {
}
然後,在構造函數中初始化要用到的Filter,包括美顏的MagicBeautyFilter和加水印的WaterMarkFilter public VideoDrawer(Context context,Resources res){
// Build the preview filter chain:
// RotationOESFilter (OES input + rotation) -> GroupFilter (watermark, etc.)
// -> optional MagicBeautyFilter -> ProcessFilter -> NoFilter (final display).
mPreFilter = new RotationOESFilter(res);// handles rotation of the OES input texture
mShow = new NoFilter(res); // draws the final texture on screen
mBeFilter = new GroupFilter(res); // container filter; the watermark is added to it below
mBeautyFilter = new MagicBeautyFilter(); // skin-whitening filter
mProcessFilter=new ProcessFilter(res); // intermediate pass between effects and display
WaterMarkFilter waterMarkFilter = new WaterMarkFilter(res);
waterMarkFilter.setWaterMark(BitmapFactory.decodeResource(res, R.mipmap.watermark));
waterMarkFilter.setPosition(0,70,0,0); // x=0, y=70; trailing 0,0 presumably means "use bitmap size" — TODO confirm in WaterMarkFilter
mBeFilter.addFilter(waterMarkFilter);
}
上面這段代碼就不進行過多解釋了,在該系列文章的第二篇中,有比較詳細的解釋,包括美白Filter和水印Filter的實現原理,不懂的童鞋,請翻閱上篇文章。 @Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
// Creates the external OES texture that receives decoded video frames, wraps
// it in a SurfaceTexture, and initializes every filter on the GL thread.
int texture[]=new int[1];
GLES20.glGenTextures(1,texture,0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES ,texture[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
// MediaPlayer will render decoded frames into this SurfaceTexture (via a Surface).
surfaceTexture = new SurfaceTexture(texture[0]);
mPreFilter.create();
mPreFilter.setTextureId(texture[0]); // the rotation filter samples the OES texture directly
mBeFilter.create();
mProcessFilter.create();
mShow.create();
mBeautyFilter.init();
mBeautyFilter.setBeautyLevel(3);// default beauty level is 3
}
在onSurfaceChanged函數中,設置視圖、紋理、濾鏡的寬高 @Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// Re-creates the offscreen FBO and its color texture at the new view size,
// then propagates the size to every filter.
viewWidth=width;
viewHeight=height;
// Delete-before-generate makes repeated surface changes safe; deleting names
// that were never generated is silently ignored by OpenGL.
GLES20.glDeleteFramebuffers(1, fFrame, 0);
GLES20.glDeleteTextures(1, fTexture, 0);
GLES20.glGenFramebuffers(1,fFrame,0);
EasyGlUtils.genTexturesWithParameter(1,fTexture,0, GLES20.GL_RGBA,viewWidth,viewHeight);
mBeFilter.setSize(viewWidth,viewHeight);
mProcessFilter.setSize(viewWidth,viewHeight);
mBeautyFilter.onDisplaySizeChanged(viewWidth,viewHeight);
mBeautyFilter.onInputSizeChanged(viewWidth,viewHeight);
}
然後在onDrawFrame中,對每一幀的視頻數據進行處理,並且顯示
@Override
public void onDrawFrame(GL10 gl) {
// Per-frame pipeline:
//   OES frame -> (FBO) rotation filter -> group/watermark filter
//   -> optional beauty filter (FBO) -> process filter -> on-screen draw.
surfaceTexture.updateTexImage(); // latch the newest decoded frame into the OES texture
EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]);
GLES20.glViewport(0,0,viewWidth,viewHeight);
mPreFilter.draw(); // render the rotated frame into fTexture[0]
EasyGlUtils.unBindFrameBuffer();
mBeFilter.setTextureId(fTexture[0]);
mBeFilter.draw(); // watermark pass; renders into the group filter's own output texture
if (mBeautyFilter != null && isBeauty && mBeautyFilter.getBeautyLevel() != 0){
// Beauty enabled: run the beauty filter into the shared FBO texture.
EasyGlUtils.bindFrameTexture(fFrame[0],fTexture[0]);
GLES20.glViewport(0,0,viewWidth,viewHeight);
mBeautyFilter.onDrawFrame(mBeFilter.getOutputTexture());
EasyGlUtils.unBindFrameBuffer();
mProcessFilter.setTextureId(fTexture[0]);
}else {
// Beauty disabled: feed the watermark output straight to the process filter.
mProcessFilter.setTextureId(mBeFilter.getOutputTexture());
}
mProcessFilter.draw();
GLES20.glViewport(0,0,viewWidth,viewHeight);
mShow.setTextureId(mProcessFilter.getOutputTexture());
mShow.draw(); // final draw to the GLSurfaceView's default framebuffer
}
/** Toggles the beauty (skin-whitening) effect for subsequent frames. */
public void switchBeauty(){
this.isBeauty = !this.isBeauty;
}
VideoDrawer類基本上就是這些,跟CameraDrawer的添加水印和美白效果的方式完全一樣,但是少了視頻錄製的相關過程,因爲我們對本地視頻的處理並不是實時錄製的,而是後面才進行錄製,所以其實更加簡單了。 private List<MediaPlayer> mPlayerList; //player list
private List<String> mSrcList; //video src list
private List<VideoInfo> mInfoList; //video info list
/** Creates the wrapper; sources are supplied later via setDataSource(). */
public MediaPlayerWrapper() {
mPlayerList = new ArrayList<>();
mInfoList = new ArrayList<>();
// NOTE(review): mSrcList is intentionally left null here — it is assigned in
// setDataSource(), which must be called before prepare().
}
提供了setDataSource方法,用於設置視頻的播放地址
/**
 * Stores the playlist and extracts basic metadata (rotation, width, height,
 * duration) for every video via MediaMetadataRetriever.
 *
 * @param dataSource list of local video file paths; must be set before prepare()
 */
public void setDataSource(List<String> dataSource) {
this.mSrcList = dataSource;
MediaMetadataRetriever retr = new MediaMetadataRetriever();
try {
for (int i = 0; i < dataSource.size(); i++) {
VideoInfo info = new VideoInfo();
String path=dataSource.get(i);
retr.setDataSource(path);
info.path=path;
// extractMetadata may return null for keys the container does not carry,
// so parse defensively instead of letting Integer.parseInt throw.
info.rotation = parseIntOrZero(retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION));
info.width = parseIntOrZero(retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
info.height = parseIntOrZero(retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
info.duration = parseIntOrZero(retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION));
mInfoList.add(info);
}
} finally {
// The retriever holds native resources; the original code never released it.
retr.release();
}
}

/** Parses a metadata string as an int, returning 0 when null or malformed. */
private static int parseIntOrZero(String value) {
if (value == null) {
return 0;
}
try {
return Integer.parseInt(value);
} catch (NumberFormatException e) {
return 0;
}
}
這裏的VideoInfo,其實就是自定義的一個視頻信息的bean public void prepare() throws IOException {
// Creates and synchronously prepares one MediaPlayer per source. The first
// player becomes the current one and its info is reported via the callback.
for (int i = 0; i < mSrcList.size(); i++) {
MediaPlayer player = new MediaPlayer();
player.setAudioStreamType(AudioManager.STREAM_MUSIC);
player.setOnCompletionListener(this);
player.setOnErrorListener(this);
player.setOnPreparedListener(this);
player.setDataSource(mSrcList.get(i));
player.prepare(); // synchronous prepare — blocks the calling thread
mPlayerList.add(player);
if (i == 0) {
mCurMediaPlayer = player;
if (mCallback != null) {
mCallback.onVideoChanged(mInfoList.get(0));
}
}
}
}
然後是視頻的start、pause和stop,我們有多個MediaPlayer當然不可能同時進行播放,所以有一個mCurMediaPlayer,用來控制當前播放的是哪個視頻。
/** Attaches the output surface to the current player and starts playback. */
public void start() {
mCurMediaPlayer.setSurface(surface);
mCurMediaPlayer.start();
if (mCallback == null) {
return;
}
mCallback.onVideoStart();
}
/** Pauses the currently playing video and notifies the callback, if any. */
public void pause() {
mCurMediaPlayer.pause();
if (mCallback == null) {
return;
}
mCallback.onVideoPause();
}
/**
 * Stops the current player.
 * NOTE(review): a stopped MediaPlayer must be re-prepared before start() can
 * be called again — confirm callers account for this.
 */
public void stop() {
mCurMediaPlayer.stop();
}
然後,就是不同播放器的切換,當播放完一個之後,通過switchPlayer切換到下一個播放器 @Override
public void onCompletion(MediaPlayer mp) {
// A clip finished: advance to the next player. When the playlist is
// exhausted, wrap back to index 0 (the playlist loops) and report overall
// completion through the callback.
curIndex++;
if (curIndex >= mSrcList.size()) {
curIndex = 0;
if (mCallback != null) {
mCallback.onCompletion(mp);
}
}
// NOTE(review): switchPlayer() calls start() on the next player without
// seeking to 0 first; a player that already completed may not replay from
// the beginning on all API levels — verify.
switchPlayer(mp);
}
/**
 * Detaches the shared output surface from the player that just finished and
 * hands it to the player at curIndex, then starts that player.
 */
private void switchPlayer(MediaPlayer mp) {
mp.setSurface(null); // only one player may own the output surface at a time
if (mCallback != null) {
mCallback.onVideoChanged(mInfoList.get(curIndex));
}
mCurMediaPlayer = mPlayerList.get(curIndex);
mCurMediaPlayer.setSurface(surface);
mCurMediaPlayer.start();
}
上面代碼中,我們看到給當前的player設置了一個顯示錶面,surface,而這個surface就是在VideoPreviewView中設置的,將MediaPlayer和OpenGL聯繫起來的關鍵
/** Sets the Surface (backed by the GL SurfaceTexture) the players render into. */
public void setSurface(Surface surface) {
this.surface = surface;
}
然後我們的MediaPlayerWrapper類,還有就是一些接口的定義,基本上就是這樣了。 @Override
/**
 * GL-thread entry point: initializes the drawer, wires its SurfaceTexture to
 * the media player, then prepares and starts playback.
 */
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
mDrawer.onSurfaceCreated(gl,config);
SurfaceTexture surfaceTexture = mDrawer.getSurfaceTexture();
// Every decoded frame triggers a render pass (the view uses RENDERMODE_WHEN_DIRTY).
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
});
Surface surface = new Surface(surfaceTexture);
mMediaPlayer.setSurface(surface);
try {
mMediaPlayer.prepare();
// Only start when prepare() succeeded. The original code called start()
// unconditionally, which starts unprepared players after an IOException
// and fails later with an IllegalStateException.
mMediaPlayer.start();
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
// Forward the new surface size to the drawer (FBO and filter resize).
mDrawer.onSurfaceChanged(gl,width,height);
}
@Override
public void onDrawFrame(GL10 gl) {
// Delegate each render pass to the drawer's filter pipeline.
mDrawer.onDrawFrame(gl);
}
如此方式,我們就很容易的實現了利用MediaPlayer解碼視頻,然後利用OpenGL對視頻數據進行二次處理,再顯示到我們的GLSurfaceView上面。當然我們這裏是一個視頻播放的控件,肯定還有一些對外提供的接口和回調函數。就不一一解釋了。然後就是在預覽界面的使用 ,這個也不多說了,主要將控件寫在xml中,然後給該控件設置視頻的播放地址,然後進行播放即可。
本地視頻解碼,OpenGL美顏,視頻數據編碼成文件
/**
 * Creates the four MediaCodec instances used for clipping: H.264 video
 * decoder/encoder and AAC (mp4a-latm) audio decoder/encoder.
 *
 * @throws IllegalStateException if a codec cannot be created. The original
 *         code swallowed the IOException and left the codec fields null,
 *         which only surfaced later as a NullPointerException.
 */
public VideoClipper() {
try {
videoDecoder = MediaCodec.createDecoderByType("video/avc");
videoEncoder = MediaCodec.createEncoderByType("video/avc");
audioDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
} catch (IOException e) {
throw new IllegalStateException("Failed to create MediaCodec instances", e);
}
}
通過名字就可以看出來,我們分別初始化了音頻、視頻的解碼器和編碼器,這篇我們主要講的是視頻的操作,所以暫時不管音頻。
/**
 * Configures the video encoder (Surface input) and the video decoder
 * (rendering into OutputSurface) for the clip pipeline.
 */
private void initVideoCodec() {
// Keep the source resolution — no downscaling
int encodeW = videoWidth;
int encodeH = videoHeight;
// Encoder parameters: H.264, 3 Mbps, 30 fps, Surface color format, 1 s GOP
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", encodeW, encodeH);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 3000000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
videoEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// The encoder's input surface becomes the EGL rendering target.
inputSurface = new InputSurface(videoEncoder.createInputSurface());
inputSurface.makeCurrent(); // EGL context must be current before OutputSurface builds its GL objects
videoEncoder.start();
VideoInfo info = new VideoInfo();
info.width = videoWidth;
info.height = videoHeight;
info.rotation = videoRotation;
// OutputSurface wraps a VideoDrawer: decoder output -> GL filters -> encoder input.
outputSurface = new OutputSurface(info);
outputSurface.isBeauty(isOpenBeauty);
videoDecoder.configure(videoFormat, outputSurface.getSurface(), null, 0);
videoDecoder.start();// start the decoder
}
在上面的代碼中,就包括了很關鍵的代碼,就是創建了一個InputSurface和一個OutputSurface。而這兩個類原型其實來自於谷歌工程師編寫的一個音視頻處理的項目grafika。然後我們進行了一些改造。 /** 開啓美顏 */
/** Enables the beauty (skin-whitening) effect for the upcoming clip run. */
public void showBeauty(){
this.isOpenBeauty = true;
}
然後我們去看OutputSurface的isBeauty方法 public void isBeauty(boolean isBeauty){
// Forward the beauty flag to the internal VideoDrawer.
mDrawer.isOpenBeauty(isBeauty);
}
是不是發現了一個眼熟的東西,對就是mDrawer。該mDrawer其實就是,在OutputSurface初始化的時候創建的一個VideoDrawer mDrawer = new VideoDrawer(MyApplication.getContext(),MyApplication.getContext().getResources());
而這個VideoDrawer在OutputSurface中的主要用法如下,首先在setup函數中進行初始化 private void setup(VideoInfo info) {
// Drives the drawer's lifecycle manually: there is no GLSurfaceView here, we
// are on the EGL context made current by InputSurface.
mDrawer = new VideoDrawer(MyApplication.getContext(),MyApplication.getContext().getResources());
mDrawer.onSurfaceCreated(null,null);
mDrawer.onSurfaceChanged(null,info.width,info.height);
// After VideoDrawer has created its SurfaceTexture, expose it here
mSurfaceTexture = mDrawer.getSurfaceTexture();
mSurfaceTexture.setOnFrameAvailableListener(this);
mSurface = new Surface(mSurfaceTexture); // handed to the video decoder via getSurface()
}
主要就是先初始化,然後把他內部創建綁定的紋理,提供出來,並且創建一個Surface,這個Surface,通過下面的代碼提供出去 /** Returns the Surface that we draw onto.*/
public Surface getSurface() {
// Accessor for the Surface created in setup(); set on the video decoder.
return mSurface;
}
並且,最終是設置在了解碼器裏面。 videoDecoder.configure(videoFormat, outputSurface.getSurface(), null, 0);
如此一來,我們通過解碼器解碼的數據,就會經過OutputSurface和VideoDrawer,然後不多說,和上面一樣的,在VideoDrawer裏面對數據進行處理。加上美白和水印等
/** Draws the data from SurfaceTexture onto the current EGL surface.*/
public void drawImage() {
// Delegates to VideoDrawer.onDrawFrame — applies watermark/beauty filters and
// renders onto the current EGL surface (the encoder's input surface).
mDrawer.onDrawFrame(null);
}
其實這裏的原理和上面的預覽視頻是一樣的
// Render-loop excerpt: decide whether this decoded frame falls inside the clip
// range, render it through OutputSurface, and push it to the encoder with its
// original timestamp.
boolean doRender = (info.size != 0 && info.presentationTimeUs - firstSampleTime > startPosition);
decoder.releaseOutputBuffer(index, doRender); // doRender=true sends the frame to the output Surface
if (doRender) {
// This waits for the image and renders it after it arrives.
outputSurface.awaitNewImage();
outputSurface.drawImage();
// Send it to the encoder.
inputSurface.setPresentationTime(info.presentationTimeUs * 1000); // microseconds -> nanoseconds
inputSurface.swapBuffers();
}
相應的,音視頻的編解碼代碼,網上有許多了,我們這裏暫時不做講解。可能會在後面的文章中,對這部分的內容進行補充。本地視頻預覽效果
處理後視頻播放效果