Compiling FFmpeg and x264 for Android

1. Basic Android FFmpeg development workflow

(1) Build x264/FFmpeg with the NDK
(2) ffmpeg.so + C encode/decode code
(3) Android.mk build
(4) JNI
(5) Calls from Java code

The basic workflow is shown in the figure below:
[figure: Android FFmpeg development workflow]

This article covers building FFmpeg and x264.

2. NDK configuration

First, the NDK development environment must be set up (not covered in detail here).
(1) Set the $NDK environment variable

# Detect NDK
if [[  -z "$NDK"  ]]; then
    echo "The NDK dir is empty, If the shell can not run normally, you should set the NDK variable to your local ndk.dir"
    exit 1
fi
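
For example (the install path is illustrative; adjust it to your machine):

# e.g. in ~/.bashrc, or at the top of the build scripts
export NDK=$HOME/Android/android-ndk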

(2) Detect the OS type

# Detect OS
OS=`uname`
HOST_ARCH=`uname -m`
export CCACHE=; type ccache >/dev/null 2>&1 && export CCACHE=ccache
if [ $OS == 'Linux' ]; then
    export HOST_SYSTEM=linux-$HOST_ARCH
elif [ $OS == 'Darwin' ]; then
    export HOST_SYSTEM=darwin-$HOST_ARCH
fi

(3) Configure SYSROOT and CROSS_PREFIX

SYSROOT=$NDK/platforms/android-16/arch-arm
CROSS_PREFIX=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/$HOST_SYSTEM/bin/arm-linux-androideabi-

3. Compiling x264

3.1 Download the source code

git clone http://git.videolan.org/git/x264.git 

3.2 Build script

#!/bin/bash

echo "###### 開始編譯 x264 ######"

SOURCE=$TARGET_X264_DIR
cd $SOURCE
#PREFIX is the install prefix; if unset, the default is /usr/local/lib and /usr/local/include
#PREFIX=../build

EXTRA_CFLAGS="-march=armv7-a -mfloat-abi=softfp -mfpu=neon -D__ARM_ARCH_7__ -D__ARM_ARCH_7A__"
EXTRA_LDFLAGS="-nostdlib"

./configure \
    --prefix=$PREFIX \
    --cross-prefix=$CROSS_PREFIX \
    --extra-cflags="$EXTRA_CFLAGS" \
    --extra-ldflags="$EXTRA_LDFLAGS" \
    --enable-static \
    --enable-pic \
    --enable-strip \
    --disable-cli \
    --host=arm-linux \
    --sysroot=$SYSROOT

make clean
make && make install

Notes:
(1) PREFIX sets the install prefix; if it is not specified, the default is /usr/local/lib and /usr/local/include.

Here, both ffmpeg and x264 are installed into a build folder under the project root (see the sketch below).
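
A minimal way to set this up, assuming the script is launched from the project root (the exact path is an assumption):

# absolute install prefix under the project root, shared by x264 and ffmpeg
PREFIX="$(pwd)/build"
mkdir -p "$PREFIX"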

(2) Do not build with multiple jobs (-j); the assembly optimizations fail with errors like:

make -j4

make: [common/arm/deblock-a.o] Error 127 (ignored)
/Users/guohe/Android/android-ndk/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64/bin/arm-linux-androideabi-gcc -I. -I. -c -DSTACK_ALIGNMENT=4 -DPIC -DHIGH_BIT_DEPTH=0 -DBIT_DEPTH=8 -o common/arm/predict-a.o common/arm/predict-a.S
/bin/sh: j4: command not found

3.3 Conclusion

Building x264 on Linux:

sudo ./configure --enable-shared --prefix=/usr/local
sudo make
sudo make install

Comparing the two: the essential extra steps for building x264 on Android are configuring SYSROOT and CROSS_PREFIX.
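
One way to sanity-check that the result was actually cross-compiled for ARM (paths assume the build/ prefix and the $CROSS_PREFIX variable defined earlier):

# every archive member should report "Machine: ARM" rather than an x86 machine type
${CROSS_PREFIX}readelf -h build/lib/libx264.a | grep Machine | sort -u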

4. Compiling FFmpeg

4.1 Download the source code

git clone git://source.ffmpeg.org/ffmpeg.git $FFMPEG_SOURCE_DIR

4.2 Build script

ADD_H264_FEATURE="--enable-encoder=aac \
    --enable-decoder=aac \
    --enable-encoder=libx264 \
    --enable-libx264 \
    --extra-cflags=-I$PREFIX/include \
    --extra-ldflags=-L$PREFIX/lib "
./configure \
    --prefix=$PREFIX \
    --enable-pthreads \
    --enable-gpl \
    --enable-version3 \
    --enable-nonfree \
    --enable-static \
    --enable-small \
    --enable-asm \
    --enable-neon \
    --cross-prefix=$CROSS_PREFIX \
    --target-os=linux \
    --arch=arm \
    --enable-cross-compile \
    --sysroot=$SYSROOT \
    $ADD_H264_FEATURE

make clean
make -j4
make install

Notes:
(1) Cross-compilation settings
--sysroot=$SYSROOT
--cross-prefix=$CROSS_PREFIX
--target-os=linux

(2) x264 support
--enable-encoder=libx264
--enable-libx264
--extra-cflags=-I$PREFIX/include
--extra-ldflags=-L$PREFIX/lib

(3) Assembly optimizations
--enable-asm
--enable-neon

4.3 Build output

At this point, both ffmpeg and x264 have been built.
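
For reference, a successful static build leaves roughly the following under $PREFIX (the exact file list depends on the configure options):

$PREFIX/
├── include/    # libavcodec/, libavformat/, libavutil/, libswscale/, ..., x264.h
└── lib/        # libavcodec.a, libavformat.a, libavutil.a, libswscale.a, libswresample.a,
                # libavfilter.a, libavdevice.a, libpostproc.a, libx264.a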

5. NDK + FFmpeg development

Now that ffmpeg and x264 are built, let's look at how to develop against the ffmpeg libraries.
Below is a small FFmpeg-based decoder whose job is to extract 5 frames from a video file.

5.1 Copy the FFmpeg and x264 libraries and headers

cp $PREFIX/lib/*.a ./jni/lib
cp -r $PREFIX/include/* ./jni/
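
After copying, the jni/ directory should look roughly like this (assuming the default install layout under $PREFIX; exact contents may differ):

jni/
├── Android.mk
├── decoder.c  encoder.c         # native sources
├── lib/                         # libavcodec.a ... libx264.a copied above
├── libavcodec/ libavformat/ libavutil/ libswscale/ ...   # FFmpeg headers
└── x264.h  x264_config.h        # x264 headers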

5.2 Write Android.mk

Android.mk consists of the following parts:
(1) Declare the prebuilt static libraries
libavcodec.a libavdevice.a libavfilter.a libavformat.a libavutil.a libpostproc.a libswresample.a libswscale.a libx264.a
Each is declared as a PREBUILT_STATIC_LIBRARY, for example:

#static version of libavutil
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil_static
LOCAL_SRC_FILES:= lib/libavutil.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

(2) Build the source files
The following variables need to be set:
LOCAL_MODULE      // name of the output library
LOCAL_SRC_FILES   // source files
LOCAL_LDLIBS      // extra system libraries to link against
LOCAL_CFLAGS      // compiler flags

(3) Choose the output type (a shared library here)

include $(BUILD_SHARED_LIBRARY)

(4) The full script:

LOCAL_PATH := $(call my-dir)

#include $(call all-subdir-makefiles)

#static version of libavcodec
include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec_static
LOCAL_SRC_FILES:= lib/libavcodec.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libavformat
include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat_static
LOCAL_SRC_FILES:= lib/libavformat.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libswscale
include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale_static
LOCAL_SRC_FILES:= lib/libswscale.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libavutil
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil_static
LOCAL_SRC_FILES:= lib/libavutil.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libavdevice
include $(CLEAR_VARS)
LOCAL_MODULE:= libavdevice_static
LOCAL_SRC_FILES:= lib/libavdevice.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libavfilter
include $(CLEAR_VARS)
LOCAL_MODULE:= libavfilter_static
LOCAL_SRC_FILES:= lib/libavfilter.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libswresample
include $(CLEAR_VARS)
LOCAL_MODULE:= libswresample_static
LOCAL_SRC_FILES:= lib/libswresample.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libpostproc
include $(CLEAR_VARS)
LOCAL_MODULE:= libpostproc_static
LOCAL_SRC_FILES:= lib/libpostproc.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

#static version of libx264
include $(CLEAR_VARS)
LOCAL_MODULE:= libx264_static
LOCAL_SRC_FILES:= lib/libx264.a
LOCAL_CFLAGS := -march=armv7-a -mfloat-abi=softfp -mfpu=neon -O3 -ffast-math -funroll-loops
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)

include $(CLEAR_VARS)

LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := decoder.c encoder.c
LOCAL_LDLIBS := -llog -lz
LOCAL_CFLAGS := -march=armv7-a -mfloat-abi=softfp -mfpu=neon -O3 -ffast-math -funroll-loops
LOCAL_WHOLE_STATIC_LIBRARIES := libavformat_static \
                        libavcodec_static \
                        libavutil_static \
                        libpostproc_static \
                        libswscale_static \
                        libswresample_static \
                        libx264_static \
                        libavfilter_static \
                        libavdevice_static

include $(BUILD_SHARED_LIBRARY)
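
ndk-build also reads jni/Application.mk. A minimal sketch matching the ARMv7 / android-16 settings used above (the exact contents are an assumption; adjust to your project):

APP_ABI := armeabi-v7a
APP_PLATFORM := android-16
APP_OPTIM := release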

5.3 The decoder program

A native module generally has two parts:
(1) The JNI entry points
/*
 * Class:     com_example_jnidemo_MainActivity
 * Method:    info
 * Signature: (Ljava/lang/String;)V
 */
JNIEXPORT void JNICALL Java_com_example_jnidemo_MainActivity_info
  (JNIEnv *, jobject, jstring);

(2) The functional code

The full source is as follows:

#include <jni.h>
#include <android/log.h>
#include <stdio.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libavutil/fifo.h>
#include <libavutil/avutil.h>
#include <libavutil/mem.h>
#include <libswscale/swscale.h>

#include "com_example_jnidemo_MainActivity.h"

#define  LOG_TAG    "FFMPEG INFO"
#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)


void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame) {
  FILE* pFile;
  char szFileName[32];
  int y;

  sprintf(szFileName,"/mnt/sdcard/test/frame%d.ppm",iFrame);

  LOGI("filename : %s",szFileName);

  pFile = fopen(szFileName,"wb");   /* PPM pixel data is binary */
  if(pFile == NULL){
      LOGI("can not open file %s",szFileName);
      return ;
  }

  fprintf(pFile,"P6\n%d %d\n255\n",width,height);

  for(y=0;y<width;++y){
      LOGI("Write file AVFrame");
      fwrite(pFrame->data[0]+y*pFrame->linesize[0],1,width*3,pFile);
  }

  fclose(pFile);
  LOGI("close file %s",szFileName);
}

JNIEXPORT void JNICALL Java_com_example_jnidemo_MainActivity_info(JNIEnv *env, jobject obj, jstring jpath){
    const char* path = (*env)->GetStringUTFChars(env,jpath,NULL);
    AVFormatContext   *pFormatCtx = NULL;
    int               i, videoStream;
    AVCodecContext    *pCodecCtxOrig = NULL;
    AVCodecContext    *pCodecCtx = NULL;
    AVCodec           *pCodec = NULL;
    AVFrame           *pFrame = NULL;
    AVFrame           *pFrameRGB = NULL;
    AVPacket          packet;
    int               frameFinished;
    int               numBytes;
    uint8_t           *buffer = NULL;
    struct SwsContext *sws_ctx = NULL;

    av_register_all();

//      if(avformat_open_input(&pFormatCtx, path, NULL, NULL)!=0)
//      {
//          LOGE("Could not open the file : %s",path);
//          return ;
//    }

    int err_code;

    if((err_code = avformat_open_input(&pFormatCtx, path, NULL, NULL)) != 0)
    {
        char buf[256];
        av_strerror(err_code, buf, sizeof(buf));
        LOGE("Couldn't open file %s: %d(%s)", path, err_code, buf);
        return;
    }

    if(avformat_find_stream_info(pFormatCtx, NULL)<0)
        return ;

    av_dump_format(pFormatCtx, 0, path, 0);

    videoStream=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            videoStream=i;
            break;
        }

    if(videoStream==-1)
        return ;

    pCodecCtxOrig=pFormatCtx->streams[videoStream]->codec;

    pCodec=avcodec_find_decoder(pCodecCtxOrig->codec_id);
    if(pCodec==NULL)
    {
        LOGE("Unsupported codec!\n");
        return ;
    }

    pCodecCtx = avcodec_alloc_context3(pCodec);
    if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0)
    {
        LOGE("Couldn't copy codec context");
        return ;
    }

    if(avcodec_open2(pCodecCtx, pCodec, NULL)<0)
        return ;

    pFrame=av_frame_alloc();
    pFrameRGB=av_frame_alloc();
    if(pFrameRGB==NULL)
        return ;

    numBytes=avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width,pCodecCtx->height);
    buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

    avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24,pCodecCtx->width, pCodecCtx->height);

    sws_ctx = sws_getContext(pCodecCtx->width,
               pCodecCtx->height,
               pCodecCtx->pix_fmt,
               pCodecCtx->width,
               pCodecCtx->height,
               AV_PIX_FMT_RGB24,
               SWS_BILINEAR,
               NULL,
               NULL,
               NULL
               );

    i=0;
    while(av_read_frame(pFormatCtx, &packet)>=0) {
        if(packet.stream_index==videoStream) {

            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            if(frameFinished) {

                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
          pFrame->linesize, 0, pCodecCtx->height,
          pFrameRGB->data, pFrameRGB->linesize);

                if(++i<=5)
                    SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);
            }
        }
        av_free_packet(&packet);
    }
    sws_freeContext(sws_ctx);
    av_free(buffer);
    av_frame_free(&pFrameRGB);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avcodec_close(pCodecCtxOrig);
    avformat_close_input(&pFormatCtx);
    (*env)->ReleaseStringUTFChars(env,jpath,path);
}

5.4 Build the native code

Change into the jni directory and run:

$NDK_PATH/ndk-build

Build output:
After the build finishes, the .so file appears under the libs directory. Since the module name set in Android.mk is ffmpeg, the output file is "libffmpeg.so".
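
With APP_ABI := armeabi-v7a (as in the Application.mk sketch above), the output lands at roughly:

libs/armeabi-v7a/libffmpeg.so
obj/local/armeabi-v7a/libffmpeg.so    # unstripped copy kept for debugging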

5.5 Calling the JNI code from Java

(1) Load the native library

    static {
        System.loadLibrary("ffmpeg");
    }

(2) JNI interface declarations
Every JNI method is declared with the native keyword:

private native void info(String path);

(3) Full code

package com.example.jnidemo;

import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;

public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        String path = "/sdcard/test/test.mp4";
        info(path);
    }

    private native void info(String path);

    static {
        System.loadLibrary("ffmpeg");
    }

}
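
The JNI header com_example_jnidemo_MainActivity.h included by decoder.c can be generated with javah (available up to JDK 9; the bin/classes path is an assumption and depends on how the project is built):

# run from the project root after compiling the Java sources
javah -classpath bin/classes -d jni com.example.jnidemo.MainActivity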

6. Project source code

AndroidFFmpeg
