Function Calls in the Stagefright AwesomePlayer Playback Flow

In the Android multimedia framework, StagefrightPlayer is essentially a proxy for AwesomePlayer: it is a thin wrapper, and the actual work is done by AwesomePlayer. The rough MediaPlayer flow, and the corresponding StagefrightPlayer and AwesomePlayer calls:
mediaPlayer.setDataSource(path);
mediaPlayer.prepare();
mediaPlayer.start();

StagefrightPlayer.cpp

AwesomePlayer *mPlayer;
...
status_t StagefrightPlayer::setDataSource(  
        const char *url, const KeyedVector<String8, String8> *headers) {  
    return mPlayer->setDataSource(url, headers);  
}  
status_t StagefrightPlayer::prepare() {  
    return mPlayer->prepare();  
}  
status_t StagefrightPlayer::start() {  
    ALOGV("start"); 
    return mPlayer->play();  
}  
status_t StagefrightPlayer::stop() {  
    ALOGV("stop"); 
    return pause();  // what's the difference?  
}  
status_t StagefrightPlayer::pause() {  
    ALOGV("pause");  
    return mPlayer->pause();  
} 

AwesomePlayer and TimedEventQueue:

TimedEventQueue is AwesomePlayer's timed event queue and its event scheduler. TimedEventQueue::postTimedEvent() inserts events into the queue ordered by their scheduled time (i.e. by delay), and an internal thread then takes the events off the queue in order and invokes each event's callback function.
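
To make that concrete, here is a minimal usage sketch. PingEvent is a made-up event type for illustration only, not part of the framework:

// Hypothetical event type, for illustration only.
struct PingEvent : public TimedEventQueue::Event {
protected:
    virtual void fire(TimedEventQueue *queue, int64_t now_us) {
        ALOGV("PingEvent fired at %lld us", (long long)now_us);
    }
};

TimedEventQueue queue;
queue.start();                                    // spawns the scheduler thread
queue.postEventWithDelay(new PingEvent, 100000);  // fire roughly 100 ms from now
// ... later ...
queue.stop(true /* flush */);                     // run remaining events, join the thread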

TimedEventQueue.h

struct TimedEventQueue {
    typedef int32_t event_id;
    struct Event : public RefBase {
        Event()
            : mEventID(0) {
        }
        virtual ~Event() {}
        event_id eventID() {
            return mEventID;
        }
    protected:
        virtual void fire(TimedEventQueue *queue, int64_t now_us) = 0;
    private:
        friend class TimedEventQueue;
        event_id mEventID;
        void setEventID(event_id id) {
            mEventID = id;
        }
        Event(const Event &);
        Event &operator=(const Event &);
    };
    TimedEventQueue();
    ~TimedEventQueue();
    void start();
    void stop(bool flush = false);
    event_id postEvent(const sp<Event> &event);
    event_id postEventToBack(const sp<Event> &event);
    event_id postEventWithDelay(const sp<Event> &event, int64_t delay_us);
    event_id postTimedEvent(const sp<Event> &event, int64_t realtime_us);
    bool cancelEvent(event_id id);
    void cancelEvents(
            bool (*predicate)(void *cookie, const sp<Event> &event),
            void *cookie,
            bool stopAfterFirstMatch = false);
    static int64_t getRealTimeUs();
private:
    struct QueueItem {
        sp<Event> event;      // the event to fire
        int64_t realtime_us;  // the absolute time at which to fire it
    };
    struct StopEvent : public TimedEventQueue::Event {
        virtual void fire(TimedEventQueue *queue, int64_t now_us) {
            queue->mStopped = true;
        }
    };
    pthread_t mThread; // worker thread, created in TimedEventQueue::start()
    List<QueueItem> mQueue; // the timed event queue itself
    Mutex mLock;
    Condition mQueueNotEmptyCondition;
    Condition mQueueHeadChangedCondition;
    event_id mNextEventID;
    bool mRunning;
    bool mStopped;
    static void *ThreadWrapper(void *me);
    void threadEntry(); 
    sp<Event> removeEventFromQueue_l(event_id id);
    TimedEventQueue(const TimedEventQueue &);
    TimedEventQueue &operator=(const TimedEventQueue &);
};

TimedEventQueue.cpp
void TimedEventQueue::start() {
    if (mRunning) {
        return;
    }
    mStopped = false;
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
    pthread_create(&mThread, &attr, ThreadWrapper, this);
    pthread_attr_destroy(&attr);
    mRunning = true;
}

void TimedEventQueue::stop(bool flush) {
    if (!mRunning) {
        return;
    }
    if (flush) {
        postEventToBack(new StopEvent);
    } else {
        postTimedEvent(new StopEvent, INT64_MIN);
    }
    void *dummy;
    pthread_join(mThread, &dummy);
    mQueue.clear();
    mRunning = false;
}
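
start() only spawns the thread; the sorted insertion and the dispatch live in postTimedEvent() and threadEntry(). Below are condensed sketches of both (not the verbatim AOSP code): locking corner cases, spurious wakeups and event cancellation are trimmed.

// Insert sorted by absolute fire time; wake the dispatch thread if the head changed.
TimedEventQueue::event_id TimedEventQueue::postTimedEvent(
        const sp<Event> &event, int64_t realtime_us) {
    Mutex::Autolock autoLock(mLock);
    event->setEventID(mNextEventID++);
    List<QueueItem>::iterator it = mQueue.begin();
    while (it != mQueue.end() && realtime_us >= (*it).realtime_us) {
        ++it;  // keep earlier deadlines toward the front
    }
    QueueItem item;
    item.event = event;
    item.realtime_us = realtime_us;
    if (it == mQueue.begin()) {
        mQueueHeadChangedCondition.signal();  // the new head may fire sooner
    }
    mQueue.insert(it, item);
    mQueueNotEmptyCondition.signal();
    return event->eventID();
}

// Condensed dispatch loop: sleep until the head event is due, then fire it
// outside the lock.
void TimedEventQueue::threadEntry() {
    for (;;) {
        sp<Event> event;
        {
            Mutex::Autolock autoLock(mLock);
            if (mStopped) {
                break;  // set by StopEvent::fire()
            }
            while (mQueue.empty()) {
                mQueueNotEmptyCondition.wait(mLock);
            }
            List<QueueItem>::iterator it = mQueue.begin();
            int64_t delay_us = (*it).realtime_us - getRealTimeUs();
            if (delay_us > 0) {
                // Either the head becomes due, or an earlier event is inserted.
                mQueueHeadChangedCondition.waitRelative(mLock, delay_us * 1000ll);
                continue;  // re-check the head
            }
            event = (*it).event;
            mQueue.erase(it);
        }
        event->fire(this, getRealTimeUs());
    }
}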

AwesomePlayer

AwesomePlayer drives and connects its functionality through event scheduling.
AwesomeEvent, which derives from TimedEventQueue::Event, binds the member function to be invoked to an event.
AwesomePlayer simply calls TimedEventQueue::postTimedEvent() on its TimedEventQueue instance to place events into the queue, ordered by delay.
Once the TimedEventQueue has been started, its internal thread takes the events off the queue in order and invokes the bound function through event->fire(), which performs the actual work.

AwesomePlayer.h:

...
   // OMXClient is the entry point to OpenMAX in Android
   OMXClient mClient;
   // Event scheduler: mQueue is AwesomePlayer's time-based event queue
   TimedEventQueue mQueue;
   bool mQueueStarted;
   wp<MediaPlayerBase> mListener;
   bool mUIDValid;
   uid_t mUID;
   sp<ANativeWindow> mNativeWindow;
   sp<MediaPlayerBase::AudioSink> mAudioSink;
   SystemTimeSource mSystemTimeSource;
   TimeSource *mTimeSource;
   String8 mUri;
   KeyedVector<String8, String8> mUriHeaders;
   sp<DataSource> mFileSource;
   sp<MediaSource> mVideoTrack;
   sp<MediaSource> mVideoSource;
   sp<AwesomeRenderer> mVideoRenderer;
   bool mVideoRendererIsPreview;
   sp<MediaSource> mAudioTrack;
   sp<MediaSource> mAudioSource;
   ...
   sp<TimedEventQueue::Event> mVideoEvent;
   bool mVideoEventPending;
   sp<TimedEventQueue::Event> mStreamDoneEvent;
   bool mStreamDoneEventPending;
   sp<TimedEventQueue::Event> mBufferingEvent;
   bool mBufferingEventPending;
   sp<TimedEventQueue::Event> mCheckAudioStatusEvent;
   bool mAudioStatusEventPending;
   sp<TimedEventQueue::Event> mVideoLagEvent;
   bool mVideoLagEventPending;

   sp<TimedEventQueue::Event> mAsyncPrepareEvent;
...

AwesomePlayer.cpp

struct AwesomeEvent : public TimedEventQueue::Event {
    AwesomeEvent(
            AwesomePlayer *player,
            void (AwesomePlayer::*method)())
        : mPlayer(player),
          mMethod(method) {
    }
protected:
    virtual ~AwesomeEvent() {}
    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        (mPlayer->*mMethod)();
    }
private:
    AwesomePlayer *mPlayer;
    void (AwesomePlayer::*mMethod)();
    AwesomeEvent(const AwesomeEvent &);
    AwesomeEvent &operator=(const AwesomeEvent &);
};

AwesomePlayer::AwesomePlayer()
    : mQueueStarted(false),
      mUIDValid(false),
      mTimeSource(NULL),
      mVideoRendererIsPreview(false),
      mAudioPlayer(NULL),
      mDisplayWidth(0),
      mDisplayHeight(0),
      mFlags(0),
      mExtractorFlags(0),
      mVideoBuffer(NULL),
      mDecryptHandle(NULL),
      mLastVideoTimeUs(-1),
      mTextPlayer(NULL) {
    CHECK_EQ(mClient.connect(), (status_t)OK);
    DataSource::RegisterDefaultSniffers();
    // AwesomePlayer defines several kinds of events and binds each one to its handler function
    mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);
    mVideoEventPending = false;
    mStreamDoneEvent = new AwesomeEvent(this, &AwesomePlayer::onStreamDone);
    mStreamDoneEventPending = false;
    mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate);
    mBufferingEventPending = false;
    mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate);
    mVideoLagEventPending = false;
    mCheckAudioStatusEvent = new AwesomeEvent(
            this, &AwesomePlayer::onCheckAudioStatus);
    mAudioStatusEventPending = false;
#ifdef SLSI_ULP_AUDIO
    mLibAudioHandle = NULL;
    mIsULPAudio = false;
    mUseULPAudio = false;
#endif
    reset();
}

1. The call flow behind prepare():
prepare() => prepare_l() => prepareAsync_l() => mQueue.postEvent(mAsyncPrepareEvent)

status_t AwesomePlayer::prepare() {
    Mutex::Autolock autoLock(mLock);
    return prepare_l();
}

status_t AwesomePlayer::prepare_l() {
    if (mFlags & PREPARED) {
        return OK;
    }
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;
    }
    mIsAsyncPrepare = false;
    status_t err = prepareAsync_l();
    if (err != OK) {
        return err;
    }
    while (mFlags & PREPARING) {
        mPreparedCondition.wait(mLock);
    }
    return mPrepareResult;
}
status_t AwesomePlayer::prepareAsync_l() {
    if (mFlags & PREPARING) {
        return UNKNOWN_ERROR;  // async prepare already pending
    }
    if (!mQueueStarted) {
        mQueue.start();
        mQueueStarted = true;
    }
    modifyFlags(PREPARING, SET);
    mAsyncPrepareEvent = new AwesomeEvent(
            this, &AwesomePlayer::onPrepareAsyncEvent);
    mQueue.postEvent(mAsyncPrepareEvent);
    return OK;
}
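
mAsyncPrepareEvent is bound to onPrepareAsyncEvent(), which runs on the queue's thread. A condensed sketch of what it does (the real implementation also handles prepare cancellation, DRM and HTTP buffering):

void AwesomePlayer::onPrepareAsyncEvent() {
    Mutex::Autolock autoLock(mLock);
    if (mUri.size() > 0) {
        // Create the extractor (demuxer) and pick the audio/video tracks.
        finishSetDataSource_l();
    }
    if (mVideoTrack != NULL && mVideoSource == NULL) {
        initVideoDecoder();   // wraps the track in an OMXCodec decoder
    }
    if (mAudioTrack != NULL && mAudioSource == NULL) {
        initAudioDecoder();
    }
    modifyFlags(PREPARING_CONNECTED, SET);
    // Clears PREPARING, sets PREPARED, notifies the listener and signals
    // mPreparedCondition, unblocking the synchronous prepare_l() above.
    finishAsyncPrepare_l();
}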

2. The call flow behind play():
play() => play_l() => postVideoEvent_l():
postVideoEvent_l() calls mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs) to put mVideoEvent into the mQueue timed event queue, where the queue's own thread schedules it.
Since mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent), mVideoEvent is bound to onVideoEvent(). Inside onVideoEvent(), mVideoSource->read(&mVideoBuffer, &options) obtains a decoded frame, mVideoRenderer->render(mVideoBuffer) renders it, and postVideoEvent_l() is called again at the end,
forming the read/decode/render loop.

status_t AwesomePlayer::play() {
    Mutex::Autolock autoLock(mLock);
    modifyFlags(CACHE_UNDERRUN, CLEAR);
    return play_l();
}
status_t AwesomePlayer::play_l() {
    modifyFlags(SEEK_PREVIEW, CLEAR);
    if (mFlags & PLAYING) {
        return OK;
    }
    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();
        if (err != OK) {
            return err;
        }
    }
    modifyFlags(PLAYING, SET);
    modifyFlags(FIRST_FRAME, SET);
    ...
    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
        if (mAudioSource != NULL && mVideoSource != NULL) {
            postVideoLagEvent_l();
        }
    }
    return OK;
}
void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    if (mVideoEventPending) {
        return;
    }
    mVideoEventPending = true;
    // Put mVideoEvent into the mQueue timed event queue; the queue's thread schedules it
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
}

void AwesomePlayer::onVideoEvent() {
    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        return;
    }
    mVideoEventPending = false;
    if (mSeeking != NO_SEEK) {
        if (mVideoBuffer) {
            mVideoBuffer->release();
            mVideoBuffer = NULL;
        }
        if (mSeeking == SEEK && isStreamingHTTP() && mAudioSource != NULL
                && !(mFlags & SEEK_PREVIEW)) {
            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
                mAudioPlayer->pause();
                modifyFlags(AUDIO_RUNNING, CLEAR);
            }
            mAudioSource->pause();
        }
    }
    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking != NO_SEEK) {
            LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
            options.setSeekTo(mSeekTimeUs,
                    mSeeking == SEEK_VIDEO_ONLY
                        ? MediaSource::ReadOptions::SEEK_NEXT_SYNC
                        : MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();
            if (err != OK) {
                CHECK(mVideoBuffer == NULL);
                if (err == INFO_FORMAT_CHANGED) {
                    LOGV("VideoSource signalled format change.");
                    notifyVideoSize_l();
                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }
                if (mSeeking != NO_SEEK) {
                    LOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);
                if (mAudioPlayer != NULL
                        && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
                    startAudioPlayer_l();
                }
                modifyFlags(VIDEO_AT_EOS, SET);
                postStreamDoneEvent_l(err);
                return;
            }
            if (mVideoBuffer->range_length() == 0) {
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }
            break;
        }
        {
            Mutex::Autolock autoLock(mStatsLock);
            ++mStats.mNumVideoFramesDecoded;
        }
    }
    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
    mLastVideoTimeUs = timeUs;
    if (mSeeking == SEEK_VIDEO_ONLY) {
        if (mSeekTimeUs > timeUs) {
            LOGI("XXX mSeekTimeUs = %lld us, timeUs = %lld us",
                 mSeekTimeUs, timeUs);
        }
    }
    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }
    SeekType wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);
    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
        status_t err = startAudioPlayer_l();
        if (err != OK) {
            LOGE("Starting the audio player failed w/ err %d", err);
            return;
        }
    }
    if ((mFlags & TEXTPLAYER_STARTED) && !(mFlags & (TEXT_RUNNING | SEEK_PREVIEW))) {
        mTextPlayer->resume();
        modifyFlags(TEXT_RUNNING, SET);
    }
    TimeSource *ts =
        ((mFlags & AUDIO_AT_EOS) || !(mFlags & AUDIOPLAYER_STARTED))
            ? &mSystemTimeSource : mTimeSource;

    if (mFlags & FIRST_FRAME) {
        modifyFlags(FIRST_FRAME, CLEAR);
        mSinceLastDropped = 0;
        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
    }

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs))   {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    if (wasSeeking == SEEK_VIDEO_ONLY) {
        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
        int64_t latenessUs = nowUs - timeUs;
        if (latenessUs > 0) {
            LOGI("after SEEK_VIDEO_ONLY we're late by %.2f secs", latenessUs / 1E6);
        }
    }
    if (wasSeeking == NO_SEEK) {
        // Let's display the first frame after seeking right away.
        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
        int64_t latenessUs = nowUs - timeUs;
        if (latenessUs > 500000ll
                && mAudioPlayer != NULL
                && mAudioPlayer->getMediaTimeMapping(
                    &realTimeUs, &mediaTimeUs)) {
            LOGI("we're much too late (%.2f secs), video skipping ahead",
                 latenessUs / 1E6);
            mVideoBuffer->release();
            mVideoBuffer = NULL;

            mSeeking = SEEK_VIDEO_ONLY;
            mSeekTimeUs = mediaTimeUs;
            postVideoEvent_l();
            return;
        }
        if (latenessUs > 40000) {
            // We're more than 40ms late.
            LOGV("we're late by %lld us (%.2f secs)",
                 latenessUs, latenessUs / 1E6);
            if (!(mFlags & SLOW_DECODER_HACK)
                    || mSinceLastDropped > FRAME_DROP_FREQ)
            {
                LOGV("we're late by %lld us (%.2f secs) dropping "
                     "one after %d frames",
                     latenessUs, latenessUs / 1E6, mSinceLastDropped);
                mSinceLastDropped = 0;
                mVideoBuffer->release();
                mVideoBuffer = NULL;
                {
                    Mutex::Autolock autoLock(mStatsLock);
                    ++mStats.mNumVideoFramesDropped;
                }
                postVideoEvent_l();
                return;
            }
        }
        if (latenessUs < -10000) {
            // We're more than 10ms early.
            postVideoEvent_l(10000);
            return;
        }
    }
    if ((mNativeWindow != NULL)
            && (mVideoRendererIsPreview || mVideoRenderer == NULL)) {
        mVideoRendererIsPreview = false;
        initRenderer_l();
    }
    if (mVideoRenderer != NULL) {
        mSinceLastDropped++;
        // The decoded frame is displayed by mVideoRenderer
        mVideoRenderer->render(mVideoBuffer);
    }
    mVideoBuffer->release();
    mVideoBuffer = NULL;
    if (wasSeeking != NO_SEEK && (mFlags & SEEK_PREVIEW)) {
        modifyFlags(SEEK_PREVIEW, CLEAR);
        return;
    }
    postVideoEvent_l(); 
}
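
The render step at the end hands the decoded buffer to the platform window. For reference, a condensed sketch of the native-window renderer from this era of AwesomePlayer.cpp (rotation setup and error handling omitted):

struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
    AwesomeNativeWindowRenderer(const sp<ANativeWindow> &nativeWindow)
        : mNativeWindow(nativeWindow) {
    }

    virtual void render(MediaBuffer *buffer) {
        int64_t timeUs;
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
        // Tag the buffer with its presentation time, then queue it for display.
        native_window_set_buffers_timestamp(mNativeWindow.get(), timeUs * 1000);
        status_t err = mNativeWindow->queueBuffer(
                mNativeWindow.get(), buffer->graphicBuffer().get());
        if (err == OK) {
            // Mark the buffer as rendered so it is recycled correctly.
            buffer->meta_data()->setInt32(kKeyRendered, 1);
        }
    }

private:
    sp<ANativeWindow> mNativeWindow;
};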

3. Reading data with mVideoSource->read(&mVideoBuffer, &options)
mVideoSource->read(&mVideoBuffer, &options) actually invokes OMXCodec::read() to obtain data. OMXCodec::read() works in two steps:
(1) drainInputBuffers() fills mPortBuffers[kPortIndexInput]; this is the parse step. OpenMAX reads the demuxed data from the data source into the input buffers, which become OpenMAX's input.
drainInputBuffers() => OMXCodec::drainInputBuffer(BufferInfo *info), which internally calls mOMX->emptyBuffer(mNode, info->mBuffer, 0, offset, flags, timestampUs);
IOMX.cpp:
virtual status_t emptyBuffer(
        node_id node,
        buffer_id buffer,
        OMX_U32 range_offset, OMX_U32 range_length,
        OMX_U32 flags, OMX_TICKS timestamp) {
    Parcel data, reply;
    data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
    data.writeIntPtr((intptr_t)node);
    data.writeIntPtr((intptr_t)buffer);
    data.writeInt32(range_offset);
    data.writeInt32(range_length);
    data.writeInt32(flags);
    data.writeInt64(timestamp);
    remote()->transact(EMPTY_BUFFER, data, &reply);
    return reply.readInt32();
}
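
For context, a condensed sketch of what OMXCodec::drainInputBuffer() does per buffer (the real version also handles codec-specific config data, seeking and secure buffers):

bool OMXCodec::drainInputBuffer(BufferInfo *info) {
    MediaBuffer *srcBuffer;
    // Pull one demuxed access unit from the extractor-backed track.
    status_t err = mSource->read(&srcBuffer);
    if (err != OK) {
        // No more input: signal end-of-stream to the component.
        mSignalledEOS = true;
        mFinalStatus = err;
        mOMX->emptyBuffer(mNode, info->mBuffer, 0, 0, OMX_BUFFERFLAG_EOS, 0);
        info->mStatus = OWNED_BY_COMPONENT;
        return false;
    }
    // Copy the access unit into the OMX input buffer.
    memcpy(info->mData,
           (const uint8_t *)srcBuffer->data() + srcBuffer->range_offset(),
           srcBuffer->range_length());
    size_t offset = srcBuffer->range_length();
    int64_t timestampUs = 0;
    CHECK(srcBuffer->meta_data()->findInt64(kKeyTime, &timestampUs));
    srcBuffer->release();
    // Hand the filled range to the component; this is the parse step
    // feeding the decoder.
    mOMX->emptyBuffer(mNode, info->mBuffer, 0, offset,
                      OMX_BUFFERFLAG_ENDOFFRAME, timestampUs);
    info->mStatus = OWNED_BY_COMPONENT;
    return true;
}
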
(2) fillOutputBuffers() fills mPortBuffers[kPortIndexOutput]; this is the decode step. OpenMAX decodes the data in the input buffers and writes the decoded, displayable video data into the output buffers.
fillOutputBuffers() => OMXCodec::fillOutputBuffer(BufferInfo *info), which internally calls mOMX->fillBuffer(mNode, info->mBuffer);
IOMX.cpp:
virtual status_t fillBuffer(node_id node, buffer_id buffer) {
    Parcel data, reply;
    data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
    data.writeIntPtr((intptr_t)node);
    data.writeIntPtr((intptr_t)buffer);
    remote()->transact(FILL_BUFFER, data, &reply);
    return reply.readInt32();
}
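
When the component finishes decoding into an output buffer, the result comes back asynchronously. A condensed sketch of the FILL_BUFFER_DONE path in OMXCodec::on_message(), which is what eventually wakes read() (the real handler also deals with EOS flags, flushing and output-port reconfiguration):

void OMXCodec::on_message(const omx_message &msg) {
    Mutex::Autolock autoLock(mLock);
    if (msg.type == omx_message::FILL_BUFFER_DONE) {
        IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
        Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
        for (size_t i = 0; i < buffers->size(); ++i) {
            BufferInfo *info = &buffers->editItemAt(i);
            if (info->mBuffer == buffer) {
                info->mStatus = OWNED_BY_US;
                mFilledBuffers.push_back(i);  // read() pops from this list
                mBufferFilled.signal();       // wakes waitForBufferFilled_l()
                break;
            }
        }
    }
}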

MediaBuffer.h

class MediaBuffer {
public:
    // The underlying data remains the responsibility of the caller!
    MediaBuffer(void *data, size_t size);
    MediaBuffer(size_t size);
    MediaBuffer(const sp<GraphicBuffer>& graphicBuffer);
    MediaBuffer(const sp<ABuffer> &buffer);
    // Decrements the reference count and returns the buffer to its
    // associated MediaBufferGroup if the reference count drops to 0.
    void release();
    // Increments the reference count.
    void add_ref();
    void *data() const;
    size_t size() const;
    size_t range_offset() const;
    size_t range_length() const;
    void set_range(size_t offset, size_t length);
    sp<GraphicBuffer> graphicBuffer() const;
    sp<MetaData> meta_data();
    // Clears meta data and resets the range to the full extent.
    void reset();
    void setObserver(MediaBufferObserver *group);
    // Returns a clone of this MediaBuffer increasing its reference count.
    // The clone references the same data but has its own range and
    // MetaData.
    MediaBuffer *clone();
    int refcount() const;
protected:
    virtual ~MediaBuffer();
private:
    friend class MediaBufferGroup;
    friend class OMXDecoder;
    // For use by OMXDecoder, reference count must be 1, drop reference
    // count to 0 without signalling the observer.
    void claim();
    MediaBufferObserver *mObserver;
    MediaBuffer *mNextBuffer;
    int mRefCount;
    void *mData;
    size_t mSize, mRangeOffset, mRangeLength;
    sp<GraphicBuffer> mGraphicBuffer;
    sp<ABuffer> mBuffer;
    bool mOwnsData;
    sp<MetaData> mMetaData;
    MediaBuffer *mOriginal;
    void setNextBuffer(MediaBuffer *buffer);
    MediaBuffer *nextBuffer();
    MediaBuffer(const MediaBuffer &);
    MediaBuffer &operator=(const MediaBuffer &);
};
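
The reference-counting contract matters here: OMXCodec::read() hands out a buffer with add_ref(), and AwesomePlayer::onVideoEvent() must call release() when it is done. A minimal standalone sketch of that contract, using a MediaBufferGroup as the owner:

#include <string.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>

using namespace android;

void bufferContractExample() {
    MediaBufferGroup group;
    group.add_buffer(new MediaBuffer(4096));  // group owns idle buffers

    MediaBuffer *buffer;
    if (group.acquire_buffer(&buffer) == OK) {  // refcount 0 -> 1
        memset(buffer->data(), 0, buffer->size());
        buffer->set_range(0, 1024);   // mark the valid payload window
        buffer->release();            // refcount 1 -> 0: returned to the group
    }
}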

OMXCodec.h

struct OMXCodec : public MediaSource,
                  public MediaBufferObserver {
    ...      
    static sp<MediaSource> Create(
            const sp<IOMX> &omx,
            const sp<MetaData> &meta, bool createEncoder,
            const sp<MediaSource> &source,
            const char *matchComponentName = NULL,
            uint32_t flags = 0,
            const sp<ANativeWindow> &nativeWindow = NULL);      
   ...  
   virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options = NULL);
   ...
   // A list of indices into mPortBuffers[kPortIndexOutput] filled with data.
    List<size_t> mFilledBuffers;
    Condition mBufferFilled;
   ...
   struct BufferInfo {
        IOMX::buffer_id mBuffer;
        BufferStatus mStatus;
        sp<IMemory> mMem;
        size_t mSize;
        void *mData;
        MediaBuffer *mMediaBuffer;
    };
...
};
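
OMXCodec::Create() is how AwesomePlayer turns a demuxed track into a decoder. A condensed sketch of AwesomePlayer::initVideoDecoder() showing the wiring (flag, duration and error handling trimmed):

status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
    mVideoSource = OMXCodec::Create(
            mClient.interface(),        // IOMX binder obtained via OMXClient
            mVideoTrack->getFormat(),   // MIME type etc. from the extractor
            false,                      // createEncoder == false: a decoder
            mVideoTrack,                // the demuxed track is the input source
            NULL, flags, mNativeWindow);
    if (mVideoSource == NULL) {
        return UNKNOWN_ERROR;
    }
    // From here on, mVideoSource->read() returns decoded frames.
    return mVideoSource->start();
}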

OMXCodec.cpp

status_t OMXCodec::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    status_t err = OK;
    *buffer = NULL;
    Mutex::Autolock autoLock(mLock);
    if (mState != EXECUTING && mState != RECONFIGURING) {
        return UNKNOWN_ERROR;
    }
    bool seeking = false;
    int64_t seekTimeUs;
    ReadOptions::SeekMode seekMode;
    if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
        seeking = true;
    }
    if (mInitialBufferSubmit) {
        mInitialBufferSubmit = false;
        if (seeking) {
            CHECK(seekTimeUs >= 0);
            mSeekTimeUs = seekTimeUs;
            mSeekMode = seekMode;
            // There's no reason to trigger the code below, there's
            // nothing to flush yet.
            seeking = false;
            mPaused = false;
        }
        drainInputBuffers();
        if (mState == EXECUTING) {
            // Otherwise mState == RECONFIGURING and this code will trigger
            // after the output port is reenabled.
            fillOutputBuffers();
        }
    }
    if (seeking) {
        while (mState == RECONFIGURING) {
            if ((err = waitForBufferFilled_l()) != OK) {
                return err;
            }
        }
        if (mState != EXECUTING) {
            return UNKNOWN_ERROR;
        }
        CODEC_LOGV("seeking to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
        mSignalledEOS = false;
        CHECK(seekTimeUs >= 0);
        mSeekTimeUs = seekTimeUs;
        mSeekMode = seekMode;
        mFilledBuffers.clear();
        CHECK_EQ((int)mState, (int)EXECUTING);
        bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
        bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
        if (emulateInputFlushCompletion) {
            onCmdComplete(OMX_CommandFlush, kPortIndexInput);
        }
        if (emulateOutputFlushCompletion) {
            onCmdComplete(OMX_CommandFlush, kPortIndexOutput);
        }
        while (mSeekTimeUs >= 0) {
            if ((err = waitForBufferFilled_l()) != OK) {
                return err;
            }
        }
    }
    while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
        if ((err = waitForBufferFilled_l()) != OK) {
            return err;
        }
    }
    if (mState == ERROR) {
        return UNKNOWN_ERROR;
    }
    if (mFilledBuffers.empty()) {
        return mSignalledEOS ? mFinalStatus : ERROR_END_OF_STREAM;
    }
    if (mOutputPortSettingsHaveChanged) {
        mOutputPortSettingsHaveChanged = false;
        return INFO_FORMAT_CHANGED;
    }
    size_t index = *mFilledBuffers.begin();
    mFilledBuffers.erase(mFilledBuffers.begin());
    BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
    info->mStatus = OWNED_BY_CLIENT;
    info->mMediaBuffer->add_ref();
    *buffer = info->mMediaBuffer;
    return OK;
}
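
read() blocks in waitForBufferFilled_l() whenever mFilledBuffers is empty; the FILL_BUFFER_DONE path sketched earlier is what signals it. A condensed sketch (the real version waits indefinitely for encoders and logs which buffers are outstanding on timeout):

status_t OMXCodec::waitForBufferFilled_l() {
    // Wait until on_message() signals that an output buffer was filled,
    // giving up after a fixed timeout so a stuck component cannot hang read().
    status_t err = mBufferFilled.waitRelative(mLock, kBufferFilledEventTimeOutNs);
    if (err != OK) {
        CODEC_LOGE("Timed out waiting for output buffers");
    }
    return err;
}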

void OMXCodec::drainInputBuffers() {
    CHECK(mState == EXECUTING || mState == RECONFIGURING);
    if (mFlags & kUseSecureInputBuffers) {
        Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
        for (size_t i = 0; i < buffers->size(); ++i) {
            if (!drainAnyInputBuffer()
                    || (mFlags & kOnlySubmitOneInputBufferAtOneTime)) {
                break;
            }
        }
    } else {
        Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
        for (size_t i = 0; i < buffers->size(); ++i) {
            BufferInfo *info = &buffers->editItemAt(i);
            if (info->mStatus != OWNED_BY_US) {
                continue;
            }
            // Submit one access unit via OMXCodec::drainInputBuffer(BufferInfo *info)
            if (!drainInputBuffer(info)) { 
                break;
            }
            if (mFlags & kOnlySubmitOneInputBufferAtOneTime) {
                break;
            }
        }
    }
}

void OMXCodec::fillOutputBuffers() {
    CHECK_EQ((int)mState, (int)EXECUTING);
    // This is a workaround for some decoders not properly reporting
    // end-of-output-stream. If we own all input buffers and also own
    // all output buffers and we already signalled end-of-input-stream,
    // the end-of-output-stream is implied.
    if (mSignalledEOS
            && countBuffersWeOwn(mPortBuffers[kPortIndexInput])
                == mPortBuffers[kPortIndexInput].size()
            && countBuffersWeOwn(mPortBuffers[kPortIndexOutput])
                == mPortBuffers[kPortIndexOutput].size()) {
#ifdef USE_ALP_AUDIO
        /* SEC mp3 decoder should be finished by EOS flag in output buffer. */
        /* Do not apply this workaround */
        if (strcmp(mComponentName, "OMX.SEC.MP3.Decoder") != 0) {
#endif
        mNoMoreOutputData = true;
        mBufferFilled.signal();
        return;
#ifdef USE_ALP_AUDIO
        }
#endif
    }
    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
    for (size_t i = 0; i < buffers->size(); ++i) {
        BufferInfo *info = &buffers->editItemAt(i);
        if (info->mStatus == OWNED_BY_US) {
            fillOutputBuffer(&buffers->editItemAt(i));
        }
    }
}

void OMXCodec::fillOutputBuffer(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
    if (mNoMoreOutputData) {
        CODEC_LOGV("There is no more output data available, not "
             "calling fillOutputBuffer");
        return;
    }
    if (info->mMediaBuffer != NULL) {
        sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
        if (graphicBuffer != 0) {
            // When using a native buffer we need to lock the buffer before
            // giving it to OMX.
            CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
            int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
                    graphicBuffer.get());
            if (err != 0) {
                CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
                setState(ERROR);
                return;
            }
        }
    }
    CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);

    status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
    if (err != OK) {
        CODEC_LOGE("fillBuffer failed w/ error 0x%08x", err);
        setState(ERROR);
        return;
    }

    info->mStatus = OWNED_BY_COMPONENT;
}