Android Studio FFMPEG 入門

  (一).目的:在Android Studio上調用FFmpeg,播放視頻,rtsp流

(二).準備工作:ffmpeg庫下載    ndk下載 (我用的是 android-ndk-r20b   Ffmpeg 3.0)

(三).1.環境變量中配置好ndk路徑  ,如下圖:

          2.Android Studio中配置好ndk路徑,如下圖:

      ----------------------------

 

(四).調用FFmpeg的整體調用邏輯爲:
             
1 編譯完ffmpeg庫(我直接網上拷貝下來編譯好的)
              2 使用jni方式撰寫c代碼,其中需要包含相應的ffmpeg的頭文件
              3 撰寫相應的Android.mk文件,裏面指定需要編譯的c代碼以及需要鏈接的動態庫
              4 執行ndk-build生成相應的jni庫
              5 創建android java程序,代碼中loadLibrary相應ffmpeg庫以及剛纔生成的jni庫
              6 靜態方法聲明native函數,這些函數在jni寫的c語言中都已經實現過




我的操作順序和上面的排序有些不一樣,但是每一步都是不能缺少的,我只是順序不同而已,最終不影響運行

1.操作如下:在main下新建jni文件夾

2.加入編譯好的庫和自定義的方法(我自己創建了一個類MyNdk)

注意點:play是視頻播放的方法,別的方法可以不要,對應的test.c裏面也只留play的方法

3.生成com_example_im_myapplication_MyNdk.h 文件

javah -encoding UTF-8 -classpath C:\Users\IM\AppData\Local\Android\Sdk\platforms\android-25\android.jar;. -jni com.example.im.myapplication.MyNdk(換成自己項目中MyNdk類的完整路徑,要和第3步生成的頭文件名com_example_im_myapplication_MyNdk.h對應)

4.在jni 包下面加入已經編譯好的 ffmpeg庫中的 include 和 lib 文件夾,直接複製進來

5.加入 Android.mk    Application.mk   test.c

a:Android.mk 文件代碼如下:

LOCAL_PATH := $(call my-dir)

# FFmpeg prebuilt shared libraries (copied into this jni/ directory)
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := libavcodec-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := libavfilter-6.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := libavformat-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := libavutil-55.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := libswresample-2.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := libswscale-4.so
include $(PREBUILT_SHARED_LIBRARY)

# Program
include $(CLEAR_VARS)
# NOTE: module name must match the name passed to System.loadLibrary()
# in the Java code (the original tutorial had the reminder "(名字要對應)"
# inline in the value, which breaks the makefile if copied verbatim).
LOCAL_MODULE := MyApplication
# Source file name must match the .c file placed in this jni/ directory.
LOCAL_SRC_FILES := test.c

LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -lz -landroid
LOCAL_SHARED_LIBRARIES := avcodec avfilter avformat avutil swresample swscale
include $(BUILD_SHARED_LIBRARY)

b: Application.mk代碼如下:

APP_ABI := armeabi-v7a
# NOTE: must match LOCAL_MODULE in Android.mk (the original had the reminder
# "(名字要對應)" inline in the value, which breaks the build if copied verbatim).
APP_MODULES := MyApplication

c:test.c代碼如下:方法的名字格式要注意一下,最容易出錯了

#include <stdio.h>
#include <string.h>

#include "com_example_im_myapplication_MyNdk.h"
#include "include/libavformat/avformat.h"
#include "include/libavcodec/avcodec.h"
#include "include/libavutil/avutil.h"
#include "include/libavfilter/avfilter.h"

#include <android/native_window_jni.h>
#include <android/native_window.h>

#include <jni.h>
#include <android/log.h>

#define LOG_TAG "JNI"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)


#define SWS_BICUBIC           4
#define SWS_BILINEAR           2

/**
 * com.ihubin.ffmpegstudy.MainActivity.avformatinfo()
 * AVFormat Support Information
 */
JNIEXPORT jstring Java_com_example_im_myapplication_MyNdk_avformatinfo(JNIEnv *env, jobject obj){

    char info[40000] = { 0 };

    av_register_all();

    AVInputFormat *if_temp = av_iformat_next(NULL);
    AVOutputFormat *of_temp = av_oformat_next(NULL);
    //Input
    while(if_temp!=NULL){
        sprintf(info, "%s[In ][%10s]\n", info, if_temp->name);
        if_temp=if_temp->next;
    }
    //Output
    while (of_temp != NULL){
        sprintf(info, "%s[Out][%10s]\n", info, of_temp->name);
        of_temp = of_temp->next;
    }
    //LOGE("%s", info);
    return (*env)->NewStringUTF(env, info);
}

/**
 * Java_com_example_im_myapplication_MyNdk_avcodecinfo()
 * Returns one line per registered codec: decoder/encoder tag, media type,
 * and codec name, as a Java string.
 */
JNIEXPORT jstring JNICALL Java_com_example_im_myapplication_MyNdk_avcodecinfo(JNIEnv *env, jobject obj)
{
    char info[40000] = { 0 };
    size_t off = 0;

    av_register_all();

    AVCodec *c_temp = av_codec_next(NULL);
    while (c_temp != NULL && off < sizeof(info)) {
        /* A codec with a decode callback is a decoder; otherwise an encoder. */
        const char *kind = (c_temp->decode != NULL) ? "[Dec]" : "[Enc]";
        const char *media;
        switch (c_temp->type) {
        case AVMEDIA_TYPE_VIDEO:
            media = "[Video]";
            break;
        case AVMEDIA_TYPE_AUDIO:
            media = "[Audio]";
            break;
        default:
            media = "[Other]";
            break;
        }
        /* Original used sprintf(info, "%s...", info): overlapping src/dst is
         * undefined behavior. Append at an offset with bounded snprintf. */
        off += snprintf(info + off, sizeof(info) - off, "%s%s[%10s]\n", kind, media, c_temp->name);
        c_temp = c_temp->next;
    }

    return (*env)->NewStringUTF(env, info);
}

/**
 * Java_com_example_im_myapplication_MyNdk_avfilterinfo()
 * Returns one line per registered libavfilter filter name, as a Java string.
 */
JNIEXPORT jstring JNICALL Java_com_example_im_myapplication_MyNdk_avfilterinfo(JNIEnv *env, jobject obj)
{
    char info[40000] = { 0 };
    size_t off = 0;

    avfilter_register_all();

    /* Original used sprintf(info, "%s...", info): overlapping src/dst is
     * undefined behavior. Append at an offset instead. (Also dropped the
     * unused local `i`.) */
    AVFilter *f_temp = (AVFilter *)avfilter_next(NULL);
    while (f_temp != NULL && off < sizeof(info)) {
        off += snprintf(info + off, sizeof(info) - off, "[%10s]\n", f_temp->name);
        f_temp = f_temp->next;
    }

    return (*env)->NewStringUTF(env, info);
}

/**
 * Java_com_example_im_ffmpegtest3_MainActivity_configurationinfo()
 * Returns the ./configure flags this FFmpeg build was compiled with.
 * (The original header comment said "urlprotocolinfo", which was stale;
 * NOTE(review): this symbol targets MainActivity while the other natives
 * target MyNdk — confirm which class actually declares it.)
 */
JNIEXPORT jstring JNICALL Java_com_example_im_ffmpegtest3_MainActivity_configurationinfo(JNIEnv *env, jobject obj)
{
    char info[10000] = {0};
    av_register_all();

    /* snprintf instead of sprintf: the configuration string's length is
     * not under our control, so bound the write to the buffer. */
    snprintf(info, sizeof(info), "%s\n", avcodec_configuration());

    return (*env)->NewStringUTF(env, info);
}

//2020-12-24wpp
// NOTE(review): none of these file-scope variables are used by
// Java_com_example_im_myapplication_MyNdk_play below — that function
// declares its own locals with similar names (pFormatCtx, pFrameRGBA,
// nativeWindow, windowBuffer). These appear to be leftovers and could be
// removed once confirmed unused elsewhere.
static AVPacket *vPacket;
static AVFrame *vFrame, *pFrameRGBA;
static AVCodecContext *vCodecCtx;
struct SwsContext *img_convert_ctx;
static AVFormatContext *pFormatCtx;
ANativeWindow* nativeWindow;
ANativeWindow_Buffer windowBuffer;
uint8_t *v_out_buffer;

/**
 * Java_com_example_im_myapplication_MyNdk_play()
 * Opens the media at `url`, decodes its first video stream, converts each
 * frame to RGBA with swscale, and renders it into the Android `surface`
 * via ANativeWindow. Returns when the stream ends or on error.
 *
 * Fixes vs. the original:
 *  - avcodec_open2() was called twice; once is enough.
 *  - GetStringUTFChars() result was never released (JNI leak).
 *  - sws_ctx was never freed; error paths leaked the format context,
 *    codec, window, and frames. Cleanup now uses goto-based unwinding.
 *  - av_frame_free() instead of av_free() so frame-owned buffers are freed.
 */
JNIEXPORT void JNICALL Java_com_example_im_myapplication_MyNdk_play(JNIEnv *env, jclass clazz, jstring url, jobject surface) {
    /* Copy the Java string into a local buffer and release it right away. */
    char file_name[500] = {0};
    const char *url_utf = (*env)->GetStringUTFChars(env, url, NULL);
    if (url_utf == NULL) {
        return; /* OutOfMemoryError already pending in the JVM */
    }
    snprintf(file_name, sizeof(file_name), "%s", url_utf);
    (*env)->ReleaseStringUTFChars(env, url, url_utf);

    av_register_all();

    AVFormatContext *pFormatCtx = avformat_alloc_context();

    // Open video file
    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGE("Couldn't open file:%s\n", file_name);
        return; /* avformat_open_input frees the context on failure */
    }

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.");
        goto close_input;
    }

    // Find the first video stream
    int videoStream = -1;
    unsigned int i;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
            && videoStream < 0) {
            videoStream = i;
        }
    }
    if (videoStream == -1) {
        LOGE("Didn't find a video stream.");
        goto close_input;
    }

    // Get a pointer to the codec context for the video stream
    AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    // Find and open the decoder for the video stream (once — the original
    // called avcodec_open2 a second time after creating the native window)
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("Codec not found.");
        goto close_input;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.");
        goto close_input;
    }

    // Acquire the native window backing the Java Surface
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (nativeWindow == NULL) {
        LOGE("Couldn't get native window from surface.");
        goto close_codec;
    }
    int videoWidth = pCodecCtx->width;
    int videoHeight = pCodecCtx->height;
    // Size the window buffers to the video; the window scales automatically
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer windowBuffer;

    // Allocate the decode frame and the RGBA frame used for rendering
    AVFrame *pFrame = av_frame_alloc();
    AVFrame *pFrameRGBA = av_frame_alloc();
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGE("Could not allocate video frame.");
        goto release_frames;
    }

    // Determine required buffer size and allocate the RGBA pixel buffer
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
    uint8_t *buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    if (buffer == NULL) {
        LOGE("Could not allocate video frame.");
        goto release_frames;
    }
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
                         pCodecCtx->width, pCodecCtx->height, 1);

    // Decoded frames are not RGBA; set up a conversion context
    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
                                                pCodecCtx->height,
                                                pCodecCtx->pix_fmt,
                                                pCodecCtx->width,
                                                pCodecCtx->height,
                                                AV_PIX_FMT_RGBA,
                                                SWS_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);
    if (sws_ctx == NULL) {
        LOGE("Could not open codec.");
        goto free_buffer;
    }

    int frameFinished;
    AVPacket packet;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Only decode packets belonging to the selected video stream
        if (packet.stream_index == videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // One decode call does not always produce a complete frame
            if (frameFinished) {
                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
                // Convert the decoded frame to RGBA
                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);
                // The window stride and the frame stride differ, so copy row by row
                uint8_t *dst = windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4;
                uint8_t *src = (uint8_t *)(pFrameRGBA->data[0]);
                int srcStride = pFrameRGBA->linesize[0];
                int h;
                for (h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
                }
                ANativeWindow_unlockAndPost(nativeWindow);
            }
        }
        av_packet_unref(&packet);
    }

    // Cleanup (also reached by the error paths above via the labels below)
    sws_freeContext(sws_ctx);
free_buffer:
    av_free(buffer);
release_frames:
    av_frame_free(&pFrameRGBA);
    av_frame_free(&pFrame);
    ANativeWindow_release(nativeWindow);
close_codec:
    avcodec_close(pCodecCtx);
close_input:
    avformat_close_input(&pFormatCtx);
}
注意點:①include 文件名稱別寫錯了   ② 方法的名字格式不要錯了      ③Java_com_example_im_myapplication_MyNdk_play是視頻播放的方法,別的方法可以不要

 

6.ndk-build,先進入到jni的目錄下

有時候會提示不存在該命令:那麼先set path一下   set path=D:\software\android-ndk-r20b\build    然後再執行ndk-build(路徑根據自己的安裝位置來)

7.現在只要在界面上顯示出來就可以了

8.運行成功:

研究了好久,終於研究出來了,開心,爲自己鼓個掌 

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章