How to Decode and Display Video on Android with FFmpeg + SDL2.0

How to decode video on Android with FFmpeg is covered in the earlier article "How to Decode Video on Android with FFmpeg", and how to display images on Android with SDL2.0 is covered in "Learning SDL from Scratch: Displaying a BMP Image on Android with SDL2.0". With those two articles as a foundation, we can now decode video with FFmpeg on Android and display the decoded frames with SDL2.0.

My development environment: Ubuntu 14.04 64-bit, Eclipse + CDT + ADT + NDK.

Before we start, this article assumes you already know how to build FFmpeg with the NDK and how to port SDL2.0 to Android; if anything is unclear, refer to the two articles mentioned above.

Project directory layout: the ffmpeg folder holds the FFmpeg headers and the Android makefile that loads the library, the SDL folder holds the SDL2.0 headers and sources plus their Android makefile, and the player folder holds the decoding and display code together with its Android makefile.
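
For reference, the jni directory ends up looking roughly like this (the layout simply mirrors the description above; the exact file lists depend on your SDL and FFmpeg versions):

jni/
    Android.mk          (top-level makefile that pulls in the sub-makefiles)
    Application.mk
    SDL/
        include/        (SDL2 headers)
        src/            (SDL2 sources)
        Android.mk
    ffmpeg/
        include/        (FFmpeg headers: libavcodec, libavformat, ...)
        Android.mk      (wraps the prebuilt libffmpeg.so)
    player/
        player.c
        Android.mk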

1. Following "Learning SDL from Scratch: Displaying a BMP Image on Android with SDL2.0", create a project, create a jni folder, create an SDL folder under jni, and copy the corresponding SDL headers and sources into it. Alternatively, as described in "How to Decode Video on Android with FFmpeg", you can load a prebuilt shared library via PREBUILT_SHARED_LIBRARY. Contents of the Android.mk under the SDL folder:

LOCAL_PATH := $(call my-dir)

###########################
#
# SDL shared library
#
###########################

include $(CLEAR_VARS)

LOCAL_MODULE := SDL2

LOCAL_C_INCLUDES := $(LOCAL_PATH)/include

LOCAL_EXPORT_C_INCLUDES := $(LOCAL_C_INCLUDES)

LOCAL_SRC_FILES := \
    $(subst $(LOCAL_PATH)/,, \
    $(wildcard $(LOCAL_PATH)/src/*.c) \
    $(wildcard $(LOCAL_PATH)/src/audio/*.c) \
    $(wildcard $(LOCAL_PATH)/src/audio/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/audio/dummy/*.c) \
    $(LOCAL_PATH)/src/atomic/SDL_atomic.c \
    $(LOCAL_PATH)/src/atomic/SDL_spinlock.c.arm \
    $(wildcard $(LOCAL_PATH)/src/core/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/cpuinfo/*.c) \
    $(wildcard $(LOCAL_PATH)/src/dynapi/*.c) \
    $(wildcard $(LOCAL_PATH)/src/events/*.c) \
    $(wildcard $(LOCAL_PATH)/src/file/*.c) \
    $(wildcard $(LOCAL_PATH)/src/haptic/*.c) \
    $(wildcard $(LOCAL_PATH)/src/haptic/dummy/*.c) \
    $(wildcard $(LOCAL_PATH)/src/joystick/*.c) \
    $(wildcard $(LOCAL_PATH)/src/joystick/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/loadso/dlopen/*.c) \
    $(wildcard $(LOCAL_PATH)/src/power/*.c) \
    $(wildcard $(LOCAL_PATH)/src/power/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/filesystem/dummy/*.c) \
    $(wildcard $(LOCAL_PATH)/src/render/*.c) \
    $(wildcard $(LOCAL_PATH)/src/render/*/*.c) \
    $(wildcard $(LOCAL_PATH)/src/stdlib/*.c) \
    $(wildcard $(LOCAL_PATH)/src/thread/*.c) \
    $(wildcard $(LOCAL_PATH)/src/thread/pthread/*.c) \
    $(wildcard $(LOCAL_PATH)/src/timer/*.c) \
    $(wildcard $(LOCAL_PATH)/src/timer/unix/*.c) \
    $(wildcard $(LOCAL_PATH)/src/video/*.c) \
    $(wildcard $(LOCAL_PATH)/src/video/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/test/*.c))

LOCAL_CFLAGS += -DGL_GLEXT_PROTOTYPES
LOCAL_LDLIBS := -ldl -lGLESv1_CM -lGLESv2 -llog -landroid

include $(BUILD_SHARED_LIBRARY)

###########################
#
# SDL static library
#
###########################

#LOCAL_MODULE := SDL2_static

#LOCAL_MODULE_FILENAME := libSDL2

#LOCAL_SRC_FILES += $(LOCAL_PATH)/src/main/android/SDL_android_main.c

#LOCAL_LDLIBS := 
#LOCAL_EXPORT_LDLIBS := -Wl,--undefined=Java_org_libsdl_app_SDLActivity_nativeInit -ldl -lGLESv1_CM -lGLESv2 -llog -landroid

#include $(BUILD_STATIC_LIBRARY)


2. Following "How to Decode Video on Android with FFmpeg", create an ffmpeg folder in the project and copy the FFmpeg-related header files into it. Contents of the Android.mk under the ffmpeg folder:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := /path/to/build/ffmpeg/libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)
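
Optionally, this prebuilt module can also export its header path, so that modules depending on it do not have to add the FFmpeg include directory to their own LOCAL_C_INCLUDES. A minimal sketch, assuming the headers were copied into jni/ffmpeg/include; the line goes before include $(PREBUILT_SHARED_LIBRARY):

# optional: export the FFmpeg headers to modules that depend on this one
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include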

3. Create a player folder for the decoding and display code. Contents of player.c:

/*
 * SDL_Lesson.c
 *
 *  Created on: Aug 12, 2014
 *      Author: clarck
 */
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"

int main(int argc, char *argv[]) {
    if (argc < 2) {
        LOGE("Usage: player <media file>\n");
        return -1;
    }
    char *file_path = argv[1];
    LOGI("file_path:%s", file_path);

    AVFormatContext *pFormatCtx;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame, *pFrameYUV;
    AVPacket *packet;
    uint8_t *out_buffer;

    SDL_Texture *bmp = NULL;
    SDL_Window *screen = NULL;
    SDL_Rect rect;
    SDL_Event event;

    static struct SwsContext *img_convert_ctx;

    int videoStream, i, numBytes;
    int ret, got_picture;

    av_register_all();
    pFormatCtx = avformat_alloc_context();

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        LOGE("Could not initialize SDL - %s. \n", SDL_GetError());
        exit(1);
    }

    if (avformat_open_input(&pFormatCtx, file_path, NULL, NULL) != 0) {
        LOGE("can't open the file. \n");
        return -1;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Could't find stream infomation.\n");
        return -1;
    }

    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }

    LOGI("videoStream:%d", videoStream);
    if (videoStream == -1) {
        LOGE("Didn't find a video stream.\n");
        return -1;
    }

    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);

    if (pCodec == NULL) {
        LOGE("Codec not found.\n");
        return -1;
    }

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.\n");
        return -1;
    }

    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();

    //---------------------------init sdl---------------------------//
    screen = SDL_CreateWindow("My Player Window", SDL_WINDOWPOS_UNDEFINED,
            SDL_WINDOWPOS_UNDEFINED, pCodecCtx->width, pCodecCtx->height,
            SDL_WINDOW_FULLSCREEN | SDL_WINDOW_OPENGL);

    SDL_Renderer *renderer = SDL_CreateRenderer(screen, -1, 0);

    bmp = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
            SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);

    //-------------------------------------------------------------//

    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
            pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
            AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

    numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
            pCodecCtx->height);
    out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
            pCodecCtx->width, pCodecCtx->height);

    rect.x = 0;
    rect.y = 0;
    rect.w = pCodecCtx->width;
    rect.h = pCodecCtx->height;

    int y_size = pCodecCtx->width * pCodecCtx->height;

    packet = (AVPacket *) malloc(sizeof(AVPacket));
    av_new_packet(packet, y_size);

    av_dump_format(pFormatCtx, 0, file_path, 0);

    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == videoStream) {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,
                    packet);

            if (ret < 0) {
                LOGE("decode error.\n");
                return -1;
            }

            LOGI("got_picture:%d", got_picture);
            if (got_picture) {
                sws_scale(img_convert_ctx,
                        (uint8_t const * const *) pFrame->data,
                        pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
                        pFrameYUV->linesize);
                // iPitch: number of bytes in one row of YUV data
                //SDL_UpdateTexture(bmp, &rect, pFrameYUV->data[0], pFrameYUV->linesize[0]);
                SDL_UpdateYUVTexture(bmp, &rect,
                        pFrameYUV->data[0], pFrameYUV->linesize[0],
                        pFrameYUV->data[1], pFrameYUV->linesize[1],
                        pFrameYUV->data[2], pFrameYUV->linesize[2]);
                SDL_RenderClear(renderer);
                SDL_RenderCopy(renderer, bmp, &rect, &rect);
                SDL_RenderPresent(renderer);
            }
            SDL_Delay(50);
        }
        av_free_packet(packet);

        SDL_PollEvent(&event);
        switch (event.type) {
        case SDL_QUIT:
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }
    }
    sws_freeContext(img_convert_ctx);
    SDL_DestroyTexture(bmp);
    SDL_DestroyRenderer(renderer);
    SDL_Quit();

    free(packet);
    av_free(out_buffer);
    av_free(pFrameYUV);
    av_free(pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}

4. Write the Android makefile under the player folder; its contents:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := player

SDL_PATH := ../SDL
FFMPEG_PATH := ../ffmpeg

LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_C_INCLUDES += $(LOCAL_PATH)/$(SDL_PATH)/include
LOCAL_C_INCLUDES += $(LOCAL_PATH)/$(FFMPEG_PATH)/include

# Add your application source files here...
LOCAL_SRC_FILES := $(SDL_PATH)/src/main/android/SDL_android_main.c 
LOCAL_SRC_FILES += player.c

LOCAL_SHARED_LIBRARIES := SDL2
LOCAL_SHARED_LIBRARIES += ffmpeg

LOCAL_LDLIBS := -lGLESv1_CM -lGLESv2 -llog

include $(BUILD_SHARED_LIBRARY)
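
To tie everything together, ndk-build also expects a top-level jni/Android.mk that recurses into the three sub-folders, plus a jni/Application.mk. A minimal sketch follows; the ABI and platform values are assumptions, adjust them to your device and NDK:

# jni/Android.mk: build every module found in the sub-folders
include $(call all-subdir-makefiles)

# jni/Application.mk (example values only)
APP_ABI := armeabi-v7a
APP_PLATFORM := android-9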

5. Modify SDLActivity.java so that it loads libffmpeg.so and libplayer.so, and change the path of the file to decode. Note that the file path passed to nativeInit() below ends up as argv[1] of main() in player.c.

// Load the .so
    static {
        System.loadLibrary("ffmpeg");
        System.loadLibrary("SDL2");
        //System.loadLibrary("SDL2_image");
        //System.loadLibrary("SDL2_mixer");
        //System.loadLibrary("SDL2_net");
        //System.loadLibrary("SDL2_ttf");
        System.loadLibrary("player");
    }

/**
    Simple nativeInit() runnable
*/
class SDLMain implements Runnable {
    @Override
    public void run() {
        // Runs SDL_main()
        String sdcard = Environment.getExternalStorageDirectory().getAbsolutePath();
        SDLActivity.nativeInit(sdcard + "/a.mp4");

        //Log.v("SDL", "SDL thread terminated");
    }
}

Project structure screenshot:



Screenshot of the running result:


