本章內容主要是簡述音視頻硬編碼模塊和部分邏輯改動
初始化主要分爲三個部分,首先是使用之前需要確定是否使用硬編碼功能,默認是關閉的;然後就是常規操作設置surfaceTextureListener監聽;最後是切換相機的點擊事件和音視頻數據的回調
/**
 * Demo wiring of the base class: enable the hardware encoders, attach the
 * TextureView surface listener, hook up the camera-switch button, and
 * register the data callbacks.
 */
override fun init() {
// Enable hardware video encoding (off by default).
openVideoCoding()
// Enable hardware audio encoding (off by default).
openAudioCoding()
// Attach the SurfaceTexture lifecycle listener.
textureView.surfaceTextureListener = surfaceTextureListener
// Switch cameras on tap.
switchCamera.setOnClickListener {
switchCamera()
}
// Raw (pre-encoding) data callback; mutually exclusive with setOnCodingVideoCallback.
setOnVideoCallback(onVideoCallback)
// Encoded data callback; mutually exclusive with setOnVideoCallback.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
setOnCodingVideoCallback(onCodingVideoCallback)
}
}
音視頻數據的回調主要是替換前面的抽象方法,改造後使用更加簡單,以後不用自己去創建Session了,會默認使用預覽Surface和硬編碼的Surface,當然你也可以不開啓硬編碼添加自己定義的Surface
/**
 * Creates a capture session. The refactored base class builds the session
 * automatically from the preview Surface (plus the encoder input Surface when
 * hard-encoding is enabled), so this override can stay empty; extra Surfaces
 * could be added to [outputs] here when hard-encoding is disabled.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
override fun createCaptureSession(previewCaptureRequest: CaptureRequest.Builder,outputs:ArrayList<Surface>){
}
/**
 * Callback receiving raw (pre-encoding) audio/video data.
 */
private val onVideoCallback = object :OnVideoCallback{
// Raw preview frame from the legacy Camera API.
override fun oldSendVideoData(data: ByteArray) {
println("")
}
// Raw PCM audio chunk before encoding.
override fun audioCoding(buffer: ByteArray, len: Int) {
println("")
}
}
/**
 * Callback receiving hardware-encoded audio/video packets.
 */
private val onCodingVideoCallback = object :OnCodingVideoCallback{
// Encoded video packet.
override fun sendVideoPacket(videoOutputBuffers: ByteBuffer) {
println("")
}
// Encoded audio packet.
override fun sendAudioPacket(byteBuffer: ByteBuffer) {
println("")
}
}
以上便是使用示例,很簡潔,文末爲基類完整代碼
import android.Manifest
import android.annotation.SuppressLint
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.graphics.ImageFormat
import android.graphics.SurfaceTexture
import android.hardware.Camera
import android.hardware.camera2.*
import android.media.*
import android.net.Uri
import android.os.Build
import android.os.Handler
import android.os.HandlerThread
import android.provider.Settings
import android.support.annotation.RequiresApi
import android.support.v4.content.ContextCompat
import android.support.v7.app.AlertDialog
import android.util.Size
import android.view.Surface
import android.view.TextureView
import kotlinx.coroutines.*
import org.jetbrains.annotations.NotNull
import java.nio.ByteBuffer
/**
 * Base activity that drives camera preview and optional hardware audio/video
 * encoding.
 *
 * On API >= 21 (LOLLIPOP) it uses Camera2 plus a Surface-input H.264
 * MediaCodec encoder; below that it falls back to the legacy
 * [android.hardware.Camera] API (audio hard-encoding is disabled on that
 * path). Audio is captured with [AudioRecord] on an IO coroutine and
 * optionally encoded to AAC.
 *
 * Raw data is delivered through [OnVideoCallback]; encoded packets through
 * [OnCodingVideoCallback]. The two callbacks are mutually exclusive.
 * Subclasses implement [createCaptureSession] to customize the Camera2
 * request/outputs before the session is created.
 */
abstract class BaseVideoActivity: BaseActivity() {
// Background thread whose looper services camera and codec callbacks.
private lateinit var mBackgroundThread: HandlerThread
// Lazily created handler bound to mBackgroundThread (see getBackgroundHandler()).
private var mBackgroundHandler: Handler? = null
// Camera2 system service.
private lateinit var cameraManager: CameraManager
// Ids of all cameras reported by the device.
private lateinit var cameraIdList: Array<String>
// Index of the currently selected camera within cameraIdList.
private var index = 0
// Id of the currently selected camera.
private lateinit var cameraId: String
// Surfaces the capture session renders into (preview + optional encoder input).
private lateinit var outputs: ArrayList<Surface>
// Currently opened camera device (null until opened).
private var cameraDevice: CameraDevice? = null
// Active capture session (null until configured).
private var cameraCaptureSession: CameraCaptureSession? = null
// On-screen preview Surface backed by the TextureView's SurfaceTexture.
private lateinit var previewSurface: Surface
// Builder for the repeating preview CaptureRequest.
private lateinit var previewCaptureRequest: CaptureRequest.Builder
// Selected camera preview resolution.
lateinit var previewSize: Size
private var onVideoCallback:OnVideoCallback? = null
/**
 * Delivers raw (pre-encoding) media data.
 * Mutually exclusive with [OnCodingVideoCallback].
 */
interface OnVideoCallback {
// Raw preview frame from the legacy Camera API (NV21 as set in startPreview()).
fun oldSendVideoData(data:ByteArray)
// Raw PCM audio chunk; len is the number of valid bytes in buffer.
fun audioCoding(buffer: ByteArray, len: Int)
}
/** Registers the raw-data callback. */
fun setOnVideoCallback(onVideoCallback: OnVideoCallback) {
this.onVideoCallback = onVideoCallback
}
private var onCodingVideoCallback:OnCodingVideoCallback? = null
/**
 * Delivers hardware-encoded packets.
 * Mutually exclusive with [OnVideoCallback].
 */
interface OnCodingVideoCallback{
// Encoded video packet.
fun sendVideoPacket(videoOutputBuffers: ByteBuffer)
// Encoded audio packet.
fun sendAudioPacket(byteBuffer: ByteBuffer)
}
/** Registers the encoded-data callback (codec APIs used here require API 21+). */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun setOnCodingVideoCallback(onCodingVideoCallback: OnCodingVideoCallback) {
this.onCodingVideoCallback = onCodingVideoCallback
}
/**
 * TextureView surface lifecycle listener: the entry point that boots the
 * whole preview/encoding pipeline once a SurfaceTexture becomes available.
 */
val surfaceTextureListener = object : TextureView.SurfaceTextureListener {
// SurfaceTexture size changed: recompute the preview size and surface.
@SuppressLint("Recycle")
override fun onSurfaceTextureSizeChanged(
surfaceTexture: SurfaceTexture,
width: Int,
height: Int
) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// Query the characteristics of the current camera.
val cameraCharacteristics = getCameraCharacteristics()
// Pick and apply a preview size matching a preferred aspect ratio.
previewSize = setPreviewSize(surfaceTexture, cameraCharacteristics)
previewSurface = Surface(surfaceTexture)
}
}
override fun onSurfaceTextureUpdated(surface: SurfaceTexture?) {
}
override fun onSurfaceTextureDestroyed(surfaceTexture: SurfaceTexture?): Boolean {
// Stop the legacy-API preview on pre-21 devices.
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
stopPreview()
}
// NOTE(review): previewSurface is lateinit and only assigned on API >= 21;
// on a pre-21 device this line would throw UninitializedPropertyAccessException — verify.
previewSurface.release()
return true
}
@SuppressLint("Recycle")
override fun onSurfaceTextureAvailable(
surfaceTexture: SurfaceTexture,
width: Int,
height: Int
) {
// Prepare the AudioRecord before preview/encoding starts.
initAudioRecord()
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// Set up Camera2 (manager, id list, current id).
initCamera2()
// Query the characteristics of the current camera.
val cameraCharacteristics = getCameraCharacteristics()
// Pick and apply a preview size.
previewSize = setPreviewSize(surfaceTexture, cameraCharacteristics)
// Preview Surface wrapping the TextureView's SurfaceTexture.
previewSurface = Surface(surfaceTexture)
// Create the Surface-input video encoder if enabled.
if (isVideoCoding){
createSurfaceVideoEncoder()
}
// Create the AAC audio encoder if enabled.
if (isAudioCoding){
createAudioEncoder()
}
// Open the camera; the session is created from its state callback.
openCamera()
}else{
oldSurfaceTexture = surfaceTexture
// Audio hard-encoding is not supported on the legacy path.
isAudioCoding = false
// Start the legacy Camera API preview.
startPreview()
}
}
}
/**
 * Initializes Camera2: grabs the CameraManager, the id list and the current id.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun initCamera2() {
cameraManager = application.getSystemService(Context.CAMERA_SERVICE) as CameraManager
cameraIdList = cameraManager.cameraIdList
cameraId = cameraIdList[index]
}
/**
 * Returns the CameraCharacteristics of the currently selected camera.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun getCameraCharacteristics(): CameraCharacteristics {
return cameraManager.getCameraCharacteristics(cameraId)
}
/**
 * Chooses a preview size using a fixed aspect-ratio preference list
 * (16:9, then 4:3, then 18:9) and applies it as the SurfaceTexture's
 * default buffer size.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun setPreviewSize(@NotNull surfaceTexture: SurfaceTexture, cameraCharacteristics: CameraCharacteristics): Size {
val aspectRatios = ArrayList<Float>()
aspectRatios.add(16.toFloat() / 9)
aspectRatios.add(4.toFloat() / 3)
aspectRatios.add(18.toFloat() / 9)
val size = getPreviewSize(cameraCharacteristics, aspectRatios)
surfaceTexture.setDefaultBufferSize(size.width, size.height)
return size
}
/**
 * Returns the first supported preview size matching any of the given aspect
 * ratios (searched in list order); falls back to 1280x720.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun getPreviewSize(@NotNull cameraCharacteristics: CameraCharacteristics, aspectRatios: ArrayList<Float>): Size {
for (aspectRatio in aspectRatios) {
val size = getPreviewSize(cameraCharacteristics, aspectRatio)
if (size != null) {
return size
}
}
return Size(1280, 720)
}
/**
 * Returns the first SurfaceTexture output size whose width/height equals the
 * given aspect ratio (e.g. 4:3, 16:9), or null when none matches.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun getPreviewSize(@NotNull cameraCharacteristics: CameraCharacteristics, aspectRatio: Float): Size? {
// NOTE(review): get(SCALER_STREAM_CONFIGURATION_MAP) can return null on some devices — verify.
val streamConfigurationMap =
cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
val supportedSizes = streamConfigurationMap.getOutputSizes(SurfaceTexture::class.java)
for (size in supportedSizes) {
if (size.width.toFloat() / size.height == aspectRatio) {
return size
}
}
return null
}
/**
 * Opens the selected camera if the CAMERA permission is granted; otherwise
 * shows a dialog that deep-links to the app's system settings page.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun openCamera() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED) {
cameraManager.openCamera(cameraId, callback, getBackgroundHandler())
} else {
val dialog = AlertDialog.Builder(this)
dialog.setTitle("開啓相機失敗").setMessage("缺少開啓相機的權限").setCancelable(false)
dialog.setNegativeButton("取消") { _, _ ->
}
dialog.setPositiveButton("授權") { _, _ ->
val intent = Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS)
intent.data = Uri.parse("package:$packageName")
startActivity(intent)
}
dialog.show()
}
}
/**
 * Camera open/close state callback: builds the preview request and the
 * capture session once the device has opened.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private val callback = object : CameraDevice.StateCallback() {
// Opened successfully; we receive the CameraDevice instance here.
override fun onOpened(camera: CameraDevice) {
cameraDevice = camera
// Build a preview CaptureRequest.
previewCaptureRequest = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)
// Render preview frames to the on-screen Surface...
previewCaptureRequest.addTarget(previewSurface)
// ...and also feed the encoder's input Surface when video encoding is on.
if (isVideoCoding){
previewCaptureRequest.addTarget(inputSurface)
}
outputs = ArrayList()
outputs.add(previewSurface)
if (isVideoCoding){
outputs.add(inputSurface)
}
// Let the subclass customize the request/outputs before the session is created.
createCaptureSession(previewCaptureRequest,outputs)
// Create the capture session on the background handler.
camera.createCaptureSession(
outputs,
mSessionCallback,
getBackgroundHandler()
)
}
// Camera no longer usable; resources would normally be released here.
override fun onDisconnected(camera: CameraDevice) {
}
// Open failed; error identifies the cause. Resources should be released here too.
override fun onError(camera: CameraDevice, error: Int) {
camera.close()
showError(error)
releaseBackgroundThread()
}
// Camera closed: tear down the session with it.
override fun onClosed(camera: CameraDevice) {
super.onClosed(camera)
cameraCaptureSession?.close()
}
}
/**
 * Hook for subclasses to customize the capture request / output Surfaces
 * before the session is created.
 */
abstract fun createCaptureSession(previewCaptureRequest: CaptureRequest.Builder,outputs:ArrayList<Surface>)
/**
 * Session configuration callback: starts the repeating preview request and
 * audio recording once the session is ready.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
val mSessionCallback = object : CameraCaptureSession.StateCallback() {
override fun onConfigured(session: CameraCaptureSession) {
cameraCaptureSession = session
session.setRepeatingRequest(
previewCaptureRequest.build(),
captureCallback,
getBackgroundHandler()
)
// Begin capturing audio.
startRecording()
}
// Session configuration failed.
override fun onConfigureFailed(session: CameraCaptureSession) {
showToast("創建Session失敗")
}
}
/**
 * Per-capture progress callback (overrides are currently no-ops).
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
val captureCallback = object : CameraCaptureSession.CaptureCallback() {
override fun onCaptureCompleted(
session: CameraCaptureSession,
request: CaptureRequest,
result: TotalCaptureResult
) {
super.onCaptureCompleted(session, request, result)
}
override fun onCaptureFailed(
session: CameraCaptureSession,
request: CaptureRequest,
failure: CaptureFailure
) {
super.onCaptureFailed(session, request, failure)
}
}
/**
 * Cycles to the next camera id (Camera2 path) or toggles front/back
 * (legacy path), then reopens the camera.
 */
fun switchCamera() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
if (cameraDevice != null) {
if (index < cameraIdList.size - 1) {
index++
} else {
index = 0
}
cameraId = cameraIdList[index]
// NOTE(review): close() completes asynchronously; reopening immediately
// after may race with the close — verify on multi-camera devices.
cameraDevice?.close()
openCamera()
} else {
showToast("請先開啓攝像頭")
}
}else{
oldSwitchCamera()
}
}
/**
 * Returns the background Handler, lazily creating its HandlerThread.
 */
fun getBackgroundHandler(): Handler {
if (mBackgroundHandler == null) {
// Thread that services camera/codec callbacks.
mBackgroundThread = HandlerThread("CameraBackground")
mBackgroundThread.start()
mBackgroundHandler = Handler(mBackgroundThread.looper)
}
return mBackgroundHandler as Handler
}
/**
 * Releases the background thread and drops any pending messages.
 */
fun releaseBackgroundThread() {
mBackgroundHandler?.removeCallbacksAndMessages(null)
mBackgroundHandler = null
mBackgroundThread.quitSafely()
mBackgroundThread.join()
}
/**
 * Maps a CameraDevice.StateCallback error code to a user-facing toast.
 */
fun showError(error: Int) {
when (error) {
CameraDevice.StateCallback.ERROR_CAMERA_IN_USE -> {
showToast("當前相機設備已經在一個更高優先級的地方打開了")
}
CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE -> {
showToast("已打開相機數量到上限了,無法再打開新的相機了")
}
CameraDevice.StateCallback.ERROR_CAMERA_DISABLED -> {
showToast("由於相關設備策略該相機設備無法打開")
}
CameraDevice.StateCallback.ERROR_CAMERA_DEVICE -> {
showToast("相機設備發生了一個致命錯誤")
}
CameraDevice.StateCallback.ERROR_CAMERA_SERVICE -> {
showToast("相機服務發生了一個致命錯誤")
}
}
}
// Tears down camera, surfaces, encoders, recording and the background thread.
override fun onDestroy() {
super.onDestroy()
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
cameraDevice?.close()
for (surface in outputs) {
surface.release()
}
if (isVideoCoding){
videoEncoder.stop()
videoEncoder.release()
}
if (isAudioCoding){
audioEncoder.stop()
audioEncoder.release()
}
endRecording()
releaseBackgroundThread()
}else{
stopPreview()
endRecording()
oldSurfaceTexture.release()
}
}
// Legacy camera id (back-facing by default).
private var oldCameraId = Camera.CameraInfo.CAMERA_FACING_BACK
// SurfaceTexture used by the legacy camera preview.
private lateinit var oldSurfaceTexture:SurfaceTexture
// Reusable preview-frame buffer for the legacy camera.
private lateinit var oldBuffers:ByteArray
// Legacy camera instance.
private lateinit var oldCamera:Camera
// Legacy camera preview size.
lateinit var oldSize:Camera.Size
/**
 * Starts the legacy Camera API preview (pre-21 path) and audio recording.
 */
fun startPreview(){
// Open the camera and rotate the display by 90 degrees.
oldCamera = Camera.open(oldCameraId)
oldCamera.setDisplayOrientation(90)
val parameters = oldCamera.parameters
// Pick a suitable preview size.
val sizeList = parameters.supportedPreviewSizes
oldSize = getOldSize(sizeList)
parameters.previewFormat = ImageFormat.NV21
// Apply picture/preview dimensions.
parameters.setPictureSize(oldSize.width,oldSize.height)
parameters.setPreviewSize(oldSize.width,oldSize.height)
oldCamera.parameters = parameters
oldCamera.setPreviewTexture(oldSurfaceTexture)
// Register a reusable callback buffer for preview data.
// NOTE(review): NV21 needs width*height*3/2 bytes; *4 over-allocates — confirm intent.
oldBuffers = ByteArray(oldSize.width * oldSize.height * 4)
oldCamera.addCallbackBuffer(oldBuffers)
oldCamera.setPreviewCallbackWithBuffer(previewCallback)
oldCamera.startPreview()
startRecording()
}
/**
 * Legacy-camera preview frame callback: recycles the buffer and forwards the
 * raw frame to the registered raw-data callback.
 */
private val previewCallback = Camera.PreviewCallback { data, camera ->
camera?.addCallbackBuffer(oldBuffers)
onVideoCallback?.oldSendVideoData(data)
}
/**
 * Stops and releases the legacy camera preview (pre-21 only).
 */
fun stopPreview(){
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
oldCamera.stopPreview()
oldCamera.release()
}
}
/**
 * Toggles between the legacy front and back cameras and restarts preview.
 */
private fun oldSwitchCamera(){
oldCameraId = if (oldCameraId == Camera.CameraInfo.CAMERA_FACING_BACK){
Camera.CameraInfo.CAMERA_FACING_FRONT
}else{
Camera.CameraInfo.CAMERA_FACING_BACK
}
stopPreview()
startPreview()
}
/**
 * Picks a legacy preview size using the same aspect-ratio preference order
 * (16:9, 4:3, 18:9); falls back to the first supported size.
 */
private fun getOldSize(sizeList:List<Camera.Size>):Camera.Size{
val aspectRatios = ArrayList<Float>()
aspectRatios.add(16.toFloat() / 9)
aspectRatios.add(4.toFloat() / 3)
aspectRatios.add(18.toFloat() / 9)
for (aspectRatio in aspectRatios){
for (size in sizeList){
if (size.width.toFloat() / size.height == aspectRatio) {
return size
}
}
}
return sizeList[0]
}
// AAC audio encoder.
private lateinit var audioEncoder:MediaCodec
// Reused BufferInfo for audio-encoder output metadata.
private lateinit var bufferInfo:MediaCodec.BufferInfo
// H.264 video encoder.
private lateinit var videoEncoder: MediaCodec
// Input Surface through which the camera feeds frames to the video encoder.
private lateinit var inputSurface:Surface
// Whether video hard-encoding is enabled.
private var isVideoCoding:Boolean = false
// Enables video hard-encoding (call before the surface becomes available).
fun openVideoCoding(){
isVideoCoding = true
}
// Whether audio hard-encoding is enabled.
private var isAudioCoding:Boolean = false
// Enables audio hard-encoding (call before the surface becomes available).
fun openAudioCoding(){
isAudioCoding = true
}
/**
 * Creates the Camera2 H.264 encoder that takes its input from a Surface.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun createSurfaceVideoEncoder(){
// Video encoder.
videoEncoder = MediaCodec.createEncoderByType("video/avc")
// Video MediaFormat matching the preview resolution.
val videoFormat = MediaFormat.createVideoFormat("video/avc", previewSize.width, previewSize.height)
// Color format: input comes from a Surface.
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
// Bitrate (encoder-only key): 480 kbps.
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, 480000)
// Frame rate is mandatory for encoders.
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25)
// Key-frame interval (left at the codec default here).
// videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5)
// BITRATE_MODE_CBR = constant bitrate, BITRATE_MODE_CQ = constant quality,
// BITRATE_MODE_VBR = bitrate follows scene complexity.
videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE,MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR)
// NOTE(review): KEY_COMPLEXITY is set with a BITRATE_MODE_* constant — likely the
// wrong constant family; verify against the MediaFormat documentation.
videoFormat.setInteger(MediaFormat.KEY_COMPLEXITY,MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR)
videoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
// Obtain the encoder's input Surface (persistent form on API 23+).
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
inputSurface = MediaCodec.createPersistentInputSurface()
videoEncoder.setInputSurface(inputSurface)
videoEncoder.setCallback(videoEncoderCallback,getBackgroundHandler())
} else {
inputSurface = videoEncoder.createInputSurface()
videoEncoder.setCallback(videoEncoderCallback)
}
videoEncoder.start()
}
/**
 * Asynchronous video-encoder callback: forwards each encoded output buffer
 * to the encoded-data callback and releases it back to the codec.
 */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private val videoEncoderCallback = object : MediaCodec.Callback() {
override fun onOutputBufferAvailable(
codec: MediaCodec,
index: Int,
info: MediaCodec.BufferInfo
) {
when {
index == MediaCodec.INFO_TRY_AGAIN_LATER ->
showToast("超時")
index >= 0 -> {
val encoderOutputBuffers = videoEncoder.getOutputBuffer(index)
if (encoderOutputBuffers != null){
onCodingVideoCallback?.sendVideoPacket(encoderOutputBuffers)
}
videoEncoder.releaseOutputBuffer(index, true)
}
}
}
override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
}
override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
}
override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
showToast("${e.message}")
}
}
// Audio sample rate in Hz.
private val sampleRateInHz = 44100
// Stereo input channel configuration (not referenced elsewhere in this class).
val stereo = AudioFormat.CHANNEL_IN_STEREO
private lateinit var audioRecord: AudioRecord
// Minimum buffer size AudioRecord will accept.
private var bufferSizeInBytes: Int = 0
// Coroutine job that owns the recording loop.
private var recordingJob: Job? = null
/**
 * Initializes the AudioRecord (16-bit PCM from the mic; mono by default).
 */
fun initAudioRecord(channelConfig: Int = AudioFormat.CHANNEL_IN_MONO) {
// Minimum buffer size AudioRecord will accept for this configuration.
bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
audioRecord = AudioRecord(MediaRecorder.AudioSource.MIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes)
}
/**
 * Starts the audio capture loop on an IO coroutine; each PCM chunk is either
 * forwarded raw or pushed through the AAC encoder.
 * NOTE(review): GlobalScope ties the loop to the process, not this activity;
 * endRecording() must always run to cancel it — verify lifecycle coverage.
 */
fun startRecording() {
recordingJob = GlobalScope.launch(Dispatchers.IO) {
if (bufferSizeInBytes > 0) {
audioRecord.startRecording()
while (isActive) {
val buffer = ByteArray(bufferSizeInBytes)
val len = audioRecord.read(buffer, 0, buffer.size)
if (len > 0) {
if (!isAudioCoding){
// Raw PCM path.
onVideoCallback?.audioCoding(buffer,len)
}else{
// Hardware AAC encoding path.
startAudioEncoder(buffer,len)
}
}
}
} else {
launch(Dispatchers.Main) {
showToast("請先初始化AudioRecord類")
}
}
}
}
/**
 * Stops and releases the AudioRecord and cancels the recording coroutine.
 */
fun endRecording() {
if (audioRecord.recordingState == AudioRecord.RECORDSTATE_RECORDING) {
audioRecord.stop()
}
if (audioRecord.state == AudioRecord.STATE_INITIALIZED) {
audioRecord.release()
}
recordingJob?.cancel()
}
/**
 * Configures and starts the AAC-LC audio encoder: 44.1 kHz, mono, 96 kbps.
 */
fun createAudioEncoder(){
bufferInfo = MediaCodec.BufferInfo()
// Audio encoder.
audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm")
// Audio MediaFormat; arg 2 = sample rate, arg 3 = channel count.
val audioFormat = MediaFormat.createAudioFormat("audio/mp4a-latm", 44100, 1)
// Bitrate (encoder-only key).
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000)
// Optional: maximum expected size of an input buffer.
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSizeInBytes)
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
audioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
audioEncoder.start()
}
/**
 * Feeds one PCM chunk through the AAC encoder (synchronous dequeue mode) and
 * forwards any encoded output packet to the encoded-data callback.
 * NOTE(review): the outer guard checks isVideoCoding, not isAudioCoding —
 * audio encoding silently depends on video encoding being on; verify intent.
 */
private fun startAudioEncoder(buffer:ByteArray,len:Int){
if (isVideoCoding){
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP){
try {
// Queue the raw PCM into an encoder input buffer.
val inputIndex = audioEncoder.dequeueInputBuffer(0)
if (inputIndex >= 0){
val byteBuffer = audioEncoder.getInputBuffer(inputIndex)
if (byteBuffer != null){
byteBuffer.clear()
// NOTE(review): put(buffer) copies the whole array before the limit is
// clamped; put(buffer, 0, len) would be the safer form — verify.
byteBuffer.put(buffer)
byteBuffer.limit(len)// cap the readable range to the valid bytes
// NOTE(review): queueInputBuffer expects a microsecond timestamp but
// nanoTime() is nanoseconds (1000x too large) — verify downstream use.
audioEncoder.queueInputBuffer(inputIndex, 0, len, System.nanoTime(), 0); // arg 4 is the presentation timestamp
}
}
// Drain one encoded output packet, if available.
val outputIndex = audioEncoder.dequeueOutputBuffer(bufferInfo,0 )
if (outputIndex >= 0){
val byteBuffer = audioEncoder.getOutputBuffer(outputIndex)
if (byteBuffer != null){
// Deliver the encoded AAC packet.
onCodingVideoCallback?.sendAudioPacket(byteBuffer)
}
audioEncoder.releaseOutputBuffer(outputIndex,false)
}
}catch (e:IllegalStateException){
e.printStackTrace()
}
}
}
}
}