[Cmake-Android]音視頻總結:
-
[Cmake-Android音視頻]SDK,NDK基本介紹
-
[Cmake-Android音視頻]NDK-r14b編譯ffmpeg3.4支持neon,硬解碼
-
[Cmake-Android音視頻]創建支持ffmpeg3.4的項目
-
[Cmake-Android音視頻]ffmpeg3.4實現解封裝
-
[Cmake-Android音視頻]ffmpeg3.4軟硬解碼和多線程解碼
-
[Cmake-Android音視頻]ffmpeg3.4視頻格式轉換和顯示
-
[Cmake-Android音視頻]ffmpeg3.4音頻重採樣
-
[Cmake-Android音視頻]OpenSLES音頻播放
OpenSL ES 介紹
OpenSL ES 是一個針對嵌入式系統的開放硬件音頻加速庫,也可以將其視爲一套針對嵌入式平臺的音頻標準,全稱爲: Open Sound Library for Embedded Systems ,它提供了一套高性能、低延遲的音頻功能實現方法,並且實現了軟硬件音頻性能的跨平臺部署,大大降低了上層處理音頻應用的開發難度。
在 Android 開發中,Google 官方從 Android 2.3 (API 9)開始,便支持了 OpenSL ES 標準 ,並且對其進行了擴展。本文介紹的 OpenSL ES 是針對 Android NDK 開發。
OpenSL ES 播放聲音流程
代碼實現:
在cmake文件中引入opensl es的庫
# Link the native library against the NDK's OpenSL ES and log system libraries.
target_link_libraries( # Specifies the target library.
native-lib
# OpenSL ES audio library shipped with the Android NDK.
OpenSLES
# Links the target library to the log library
# included in the NDK.
${log-lib})
#include <jni.h>
#include <string>
#include <android/log.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
// Log tag shared by all native log output in this file.
#define LOGTAG "native-lib"
// Info-level logging helper wrapping the Android log API.
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOGTAG, __VA_ARGS__)
// The OpenSL ES engine object; created once by CreateSL() and kept for
// the lifetime of the library.
static SLObjectItf engineSL= NULL;
// Creates and realizes the global OpenSL ES engine object (stored in the
// file-scope `engineSL`) and returns its SL_IID_ENGINE interface.
// Returns NULL on failure; on failure the partially created engine object
// is destroyed so it does not leak (the original leaked it).
SLEngineItf CreateSL()
{
    SLresult re;
    SLEngineItf en;
    // No engine options, no extra interfaces requested.
    re = slCreateEngine(&engineSL, 0, NULL, 0, NULL, NULL);
    if (re != SL_RESULT_SUCCESS) return NULL;
    // SL_BOOLEAN_FALSE: realize synchronously (blocking).
    re = (*engineSL)->Realize(engineSL, SL_BOOLEAN_FALSE);
    if (re != SL_RESULT_SUCCESS)
    {
        // Realization failed: release the unrealized object.
        (*engineSL)->Destroy(engineSL);
        engineSL = NULL;
        return NULL;
    }
    re = (*engineSL)->GetInterface(engineSL, SL_IID_ENGINE, &en);
    if (re != SL_RESULT_SUCCESS)
    {
        (*engineSL)->Destroy(engineSL);
        engineSL = NULL;
        return NULL;
    }
    return en;
}
// Buffer-queue callback: invoked by OpenSL ES each time the playback queue
// drains. Reads the next 1024-byte chunk of raw PCM from /sdcard/output.pcm
// and enqueues it. Once EOF is reached nothing more is enqueued, so playback
// simply stops. `content` is the user context registered with
// RegisterCallback (unused here).
void pcmCall(SLAndroidSimpleBufferQueueItf bf, void *content)
{
    LOGI(" pcmCall");
    // Static state: the buffer must outlive the Enqueue() call, because the
    // Android buffer queue consumes the data asynchronously without copying.
    // File and buffer are intentionally kept open for the process lifetime.
    static FILE *fp = NULL;
    static char *buf = NULL;
    if (!buf)
    {
        buf = new char[1024 * 1024];
    }
    if (!fp)
    {
        // Binary mode; requires storage read permission on the device.
        fp = fopen("/sdcard/output.pcm", "rb");
    }
    if (!fp)
    {
        // BUG FIX: the original failed silently (and had a stray `;;`).
        LOGI("fopen /sdcard/output.pcm failed");
        return;
    }
    if (feof(fp) == 0)
    {
        int len = (int) fread(buf, 1, 1024, fp);
        if (len > 0)
        {
            (*bf)->Enqueue(bf, buf, (SLuint32) len);
        }
    }
}
// JNI entry point: builds the OpenSL ES playback chain
// (engine -> output mix -> buffer-queue audio player), registers pcmCall as
// the queue callback, starts playback, and returns a greeting string.
// On any failure it now logs and returns early instead of dereferencing a
// NULL object pointer (the original continued and crashed).
extern "C" JNIEXPORT jstring JNICALL
Java_lucas_testopensl_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    std::string hello = "Hello from C++";
    // 1. Create the engine.
    SLEngineItf eng = CreateSL();
    if (eng)
    {
        LOGI("CreateSL success");
    } else {
        // BUG FIX: the original logged "CreateSL success" in this branch too.
        LOGI("CreateSL failed");
        return env->NewStringUTF(hello.c_str());
    }
    // 2. Create and realize the output mix (audio sink).
    SLObjectItf mix = NULL;
    SLresult re = 0;
    re = (*eng)->CreateOutputMix(eng, &mix, 0, 0, 0);
    if (re != SL_RESULT_SUCCESS)
    {
        LOGI("CreateOutputMix failed");
        return env->NewStringUTF(hello.c_str());
    }
    re = (*mix)->Realize(mix, SL_BOOLEAN_FALSE);
    if (re != SL_RESULT_SUCCESS)
    {
        LOGI("mix Realize failed");
        return env->NewStringUTF(hello.c_str());
    }
    SLDataLocator_OutputMix out_mix = {SL_DATALOCATOR_OUTPUTMIX, mix};
    SLDataSink audioSink = {&out_mix, 0};
    // 3. Describe the audio source: an Android simple buffer queue
    //    (up to 10 pending buffers) carrying 16-bit stereo PCM.
    SLDataLocator_AndroidSimpleBufferQueue que = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 10};
    SLDataFormat_PCM pcm = {
            SL_DATAFORMAT_PCM,
            2,                              // channel count
            SL_SAMPLINGRATE_44_1,           // 44.1 kHz (constant is in milliHz)
            SL_PCMSAMPLEFORMAT_FIXED_16,    // bits per sample
            SL_PCMSAMPLEFORMAT_FIXED_16,    // container size
            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
            SL_BYTEORDER_LITTLEENDIAN       // little-endian samples
    };
    SLDataSource dataSource = {&que, &pcm};
    // 4. Create the player, requesting the buffer-queue interface.
    SLObjectItf player = NULL;
    SLPlayItf iplayer = NULL;
    SLAndroidSimpleBufferQueueItf pcmQue = NULL;
    const SLInterfaceID ids[] = {SL_IID_BUFFERQUEUE};
    const SLboolean req[] = {SL_BOOLEAN_TRUE};
    re = (*eng)->CreateAudioPlayer(eng, &player, &dataSource, &audioSink,
                                   sizeof(ids) / sizeof(SLInterfaceID), ids, req);
    if (re != SL_RESULT_SUCCESS)
    {
        LOGI("CreateAudioPlayer failed");
        return env->NewStringUTF(hello.c_str());
    }
    LOGI("CreateAudioPlayer success");
    // BUG FIX: the original ignored this Realize result.
    re = (*player)->Realize(player, SL_BOOLEAN_FALSE);
    if (re != SL_RESULT_SUCCESS)
    {
        LOGI("player Realize failed");
        return env->NewStringUTF(hello.c_str());
    }
    // Play interface controls the playback state.
    re = (*player)->GetInterface(player, SL_IID_PLAY, &iplayer);
    if (re != SL_RESULT_SUCCESS)
    {
        LOGI("GetInterface SL_IID_PLAY failed");
        return env->NewStringUTF(hello.c_str());
    }
    LOGI("GetInterface SL_IID_PLAY success");
    // Buffer-queue interface feeds PCM data to the player.
    re = (*player)->GetInterface(player, SL_IID_BUFFERQUEUE, &pcmQue);
    if (re != SL_RESULT_SUCCESS)
    {
        LOGI("GetInterface SL_IID_BUFFERQUEUE failed");
        return env->NewStringUTF(hello.c_str());
    }
    LOGI("GetInterface SL_IID_BUFFERQUEUE success");
    // Register the callback invoked whenever the queue drains.
    (*pcmQue)->RegisterCallback(pcmQue, pcmCall, 0);
    // Start playing.
    (*iplayer)->SetPlayState(iplayer, SL_PLAYSTATE_PLAYING);
    // Kick off the callback loop: the first buffer must be enqueued manually.
    (*pcmQue)->Enqueue(pcmQue, "", 1);
    return env->NewStringUTF(hello.c_str());
}