在上一篇文章擴展的merge模式中,當前CU在生成的merge list中選擇一個率失真代價最小的候選項的運動信息直接作爲自己的運動信息。而MMVD是將候選項的運動信息作爲自己MV的預測值,需要編碼傳輸MVD(自己最優MV與預測MV的差值)。
MMVD起源於之前提案中的ultimate motion vector expression(UMVE)技術,UMVE是一種新的運動向量表示方法,在skip和merge模式中使用起始點、運動步長、運動方向三個量來表示運動向量。
在VTM5中,MMVD首先從merge list中取出前2個MRG_TYPE_DEFAULT_N類型的候選項作爲初始向量。在VTM5中定義如下:
static const int MMVD_BASE_MV_NUM = 2; ///< max number of base MV candidates taken from the merge list
//!< Take the first 2 base MVs out of the regular merge list
void PU::getInterMMVDMergeCandidates(const PredictionUnit &pu, MergeCtx& mrgCtx, const int& mrgCandIdx)
{
int refIdxList0, refIdxList1;
int k;
int currBaseNum = 0;
const uint16_t maxNumMergeCand = mrgCtx.numValidMergeCand;
for (k = 0; k < maxNumMergeCand; k++)
{//!<只取MRG_TYPE_DEFAULT_N類型候選項
if (mrgCtx.mrgTypeNeighbours[k] == MRG_TYPE_DEFAULT_N)
{
refIdxList0 = mrgCtx.mvFieldNeighbours[(k << 1)].refIdx;
refIdxList1 = mrgCtx.mvFieldNeighbours[(k << 1) + 1].refIdx;
if ((refIdxList0 >= 0) && (refIdxList1 >= 0))
{
mrgCtx.mmvdBaseMv[currBaseNum][0] = mrgCtx.mvFieldNeighbours[(k << 1)];
mrgCtx.mmvdBaseMv[currBaseNum][1] = mrgCtx.mvFieldNeighbours[(k << 1) + 1];
}
else if (refIdxList0 >= 0)
{
mrgCtx.mmvdBaseMv[currBaseNum][0] = mrgCtx.mvFieldNeighbours[(k << 1)];
mrgCtx.mmvdBaseMv[currBaseNum][1] = MvField(Mv(0, 0), -1);
}
else if (refIdxList1 >= 0)
{
mrgCtx.mmvdBaseMv[currBaseNum][0] = MvField(Mv(0, 0), -1);
mrgCtx.mmvdBaseMv[currBaseNum][1] = mrgCtx.mvFieldNeighbours[(k << 1) + 1];
}
currBaseNum++;
//!<只取2個base MV
if (currBaseNum == MMVD_BASE_MV_NUM)
break;
}
}
#if !JVET_N0448_N0380
if (currBaseNum < MMVD_BASE_MV_NUM)
{
for (k = currBaseNum; k < MMVD_BASE_MV_NUM; k++)
{
mrgCtx.mmvdBaseMv[k][0] = MvField(Mv(0, 0), 0);
const Slice &slice = *pu.cs->slice;
mrgCtx.mmvdBaseMv[k][1] = MvField(Mv(0, 0), (slice.isInterB() ? 0 : -1));
mrgCtx.GBiIdx[k] = GBI_DEFAULT;
mrgCtx.interDirNeighbours[k] = (mrgCtx.mmvdBaseMv[k][0].refIdx >= 0) + (mrgCtx.mmvdBaseMv[k][1].refIdx >= 0) * 2;
}
}
#endif
}
提取出初始候選項後以候選項在參考幀中指向的位置爲起始點,以4個方向、8個步長分別形成不同的運動向量(MV)。一共2個候選MV,每個候選MV生成4x8=32個新的MV,所以2個候選MV一共生成64個新的MV。關於步長和方向定義如下:
Distance IDX | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 |
---|---|---|---|---|---|---|---|---|
Offset (in unit of luma sample) | 1/4 | 1/2 | 1 | 2 | 4 | 8 | 16 | 32 |
Direction IDX | 00 | 01 | 10 | 11 |
---|---|---|---|---|
x-axis | + | – | N/A | N/A |
y-axis | N/A | N/A | + | – |
然後遍歷這64個候選MV選擇率失真代價最小的MV作爲MMVD最終的MV,但是傳輸時只需要傳輸(1)該MV的初始MV在merge list中的索引。(2)運動方向。(3)運動步長。
當初始MV是uni-prediction(單向預測)MV時,最終MV等於起始MV直接加上offset。當初始MV是雙向預測MV時:若兩個參考幀到當前幀的POC距離(帶符號)相同,則兩個列表的MV都等於起始MV加上offset;否則offset先加到POC距離較遠的那個列表上,另一個列表的offset按POC距離比例縮放得到——兩個參考幀在當前幀同側時縮放因子爲正,不同側時爲負(即list1的offset與list0方向相反)。
生成新的候選MV的代碼如下:
//!< Convert an MMVD candidate index (candIdx, 0..63) into the PU's final motion
//!< information. candIdx jointly encodes the base-MV index (2 bases), the offset
//!< step (8 distances) and the offset direction (4 directions); only these three
//!< values are signalled in the bitstream, never the MV itself.
void MergeCtx::setMmvdMergeCandiInfo(PredictionUnit& pu, int candIdx)
{
const Slice &slice = *pu.cs->slice;
const int mvShift = MV_FRACTIONAL_BITS_DIFF;
//!< 4 8 16 32 64 128 256 512 in 1/16-pel precision, i.e. 1/4..32 luma samples
const int refMvdCands[8] = { 1 << mvShift , 2 << mvShift , 4 << mvShift , 8 << mvShift , 16 << mvShift , 32 << mvShift, 64 << mvShift , 128 << mvShift };
int fPosGroup = 0;
int fPosBaseIdx = 0;
int fPosStep = 0;
int tempIdx = 0;
int fPosPosition = 0;
Mv tempMv[2];
tempIdx = candIdx; //!< candIdx is in [0, 63]
fPosGroup = tempIdx / (MMVD_BASE_MV_NUM * MMVD_MAX_REFINE_NUM); //!< equals 0 (2 * 32 = 64 candidates in a single group)
tempIdx = tempIdx - fPosGroup * (MMVD_BASE_MV_NUM * MMVD_MAX_REFINE_NUM); //!< these two steps amount to a modulo-64; result in [0, 63]
fPosBaseIdx = tempIdx / MMVD_MAX_REFINE_NUM; //!< 0 or 1: index of the base (starting) MV
tempIdx = tempIdx - fPosBaseIdx * (MMVD_MAX_REFINE_NUM); //!< these two steps amount to a modulo-32; result in [0, 31]
fPosStep = tempIdx / 4; //!< 0..7: index into the 8 offset step sizes
fPosPosition = tempIdx - fPosStep * (4); //!< modulo-4: 0..3, index of the 4 offset directions
int offset = refMvdCands[fPosStep];
if ( pu.cu->slice->getDisFracMMVD() )
{
//!< Fractional MMVD distances disabled at slice level: use 4x larger (full-sample) offsets.
offset <<= 2;
}
const int refList0 = mmvdBaseMv[fPosBaseIdx][0].refIdx;
const int refList1 = mmvdBaseMv[fPosBaseIdx][1].refIdx;
//!< Case 1: bi-predicted base MV — derive one offset per reference list.
if ((refList0 != -1) && (refList1 != -1))
{
const int poc0 = slice.getRefPOC(REF_PIC_LIST_0, refList0);
const int poc1 = slice.getRefPOC(REF_PIC_LIST_1, refList1);
const int currPoc = slice.getPOC();
//!< Map the direction index to a signed offset: 0 = +x, 1 = -x, 2 = +y, 3 = -y.
if (fPosPosition == 0)
{
tempMv[0] = Mv(offset, 0);
}
else if (fPosPosition == 1)
{
tempMv[0] = Mv(-offset, 0);
}
else if (fPosPosition == 2)
{
tempMv[0] = Mv(0, offset);
}
else
{
tempMv[0] = Mv(0, -offset);
}//!< Both references at the same signed POC distance: reuse the offset unchanged for list 1.
if ((poc0 - currPoc) == (poc1 - currPoc))
{
tempMv[1] = tempMv[0];
}//!< The list-1 reference is farther from the current picture: offset goes to list 1, list 0 is derived.
else if (abs(poc1 - currPoc) > abs(poc0 - currPoc))
{//!< scale is positive when both references are on the same side of the current picture, negative when on opposite sides
const int scale = PU::getDistScaleFactor(currPoc, poc0, currPoc, poc1);
tempMv[1] = tempMv[0];
#if JVET_N0332_LTRP_MMVD_FIX
const bool isL0RefLongTerm = slice.getRefPic(REF_PIC_LIST_0, refList0)->longTerm;
const bool isL1RefLongTerm = slice.getRefPic(REF_PIC_LIST_1, refList1)->longTerm;
if (isL0RefLongTerm || isL1RefLongTerm)
{//!< A long-term reference is involved: POC-distance scaling is not meaningful, so copy or mirror instead.
if ((poc1 - currPoc)*(poc0 - currPoc) > 0)
{//!< Same side of the current picture: identical offset.
tempMv[0] = tempMv[1];
}
else
{//!< Opposite sides: mirrored offset.
tempMv[0].set(-1 * tempMv[1].getHor(), -1 * tempMv[1].getVer());
}
}
else
#endif
//!< Short-term references: scale the list-0 offset by the POC-distance ratio.
//!< (Without JVET_N0332_LTRP_MMVD_FIX this line executes unconditionally.)
tempMv[0] = tempMv[1].scaleMv(scale);
}
//!< Otherwise the list-0 reference is at least as far: offset goes to list 0, list 1 is derived.
else
{
const int scale = PU::getDistScaleFactor(currPoc, poc1, currPoc, poc0);
#if JVET_N0332_LTRP_MMVD_FIX
const bool isL0RefLongTerm = slice.getRefPic(REF_PIC_LIST_0, refList0)->longTerm;
const bool isL1RefLongTerm = slice.getRefPic(REF_PIC_LIST_1, refList1)->longTerm;
if (isL0RefLongTerm || isL1RefLongTerm) //!< a long-term reference picture is involved
{ //!< both references on the same side of the current picture: copy the offset
if ((poc1 - currPoc)*(poc0 - currPoc) > 0)
{
tempMv[1] = tempMv[0];
}
else
{//!< references on opposite sides: the list-1 offset is the negation of the list-0 offset
tempMv[1].set(-1 * tempMv[0].getHor(), -1 * tempMv[0].getVer());
}
}
else
#endif
//!< Short-term references: scale the list-1 offset by the POC-distance ratio.
tempMv[1] = tempMv[0].scaleMv(scale);
}
//!< Final bi-directional MVs: base MV plus the per-list offset.
pu.interDir = 3;
pu.mv[REF_PIC_LIST_0] = mmvdBaseMv[fPosBaseIdx][0].mv + tempMv[0];
pu.refIdx[REF_PIC_LIST_0] = refList0;
pu.mv[REF_PIC_LIST_1] = mmvdBaseMv[fPosBaseIdx][1].mv + tempMv[1];
pu.refIdx[REF_PIC_LIST_1] = refList1;
}
//!< Case 2: uni-predicted base MV, list 0 only.
else if (refList0 != -1)
{
if (fPosPosition == 0)
{
tempMv[0] = Mv(offset, 0);
}
else if (fPosPosition == 1)
{
tempMv[0] = Mv(-offset, 0);
}
else if (fPosPosition == 2)
{
tempMv[0] = Mv(0, offset);
}
else
{
tempMv[0] = Mv(0, -offset);
}
pu.interDir = 1;
pu.mv[REF_PIC_LIST_0] = mmvdBaseMv[fPosBaseIdx][0].mv + tempMv[0];
pu.refIdx[REF_PIC_LIST_0] = refList0;
pu.mv[REF_PIC_LIST_1] = Mv(0, 0);
pu.refIdx[REF_PIC_LIST_1] = -1;
}
//!< Case 3: uni-predicted base MV, list 1 only.
else if (refList1 != -1)
{
if (fPosPosition == 0)
{
tempMv[1] = Mv(offset, 0);
}
else if (fPosPosition == 1)
{
tempMv[1] = Mv(-offset, 0);
}
else if (fPosPosition == 2)
{
tempMv[1] = Mv(0, offset);
}
else
{
tempMv[1] = Mv(0, -offset);
}
pu.interDir = 2;
pu.mv[REF_PIC_LIST_0] = Mv(0, 0);
pu.refIdx[REF_PIC_LIST_0] = -1;
pu.mv[REF_PIC_LIST_1] = mmvdBaseMv[fPosBaseIdx][1].mv + tempMv[1];
pu.refIdx[REF_PIC_LIST_1] = refList1;
}
//!< Common bookkeeping: mark the PU as an MMVD merge PU; no explicit MVD/MVP is coded.
pu.mmvdMergeFlag = true;
pu.mmvdMergeIdx = candIdx;
pu.mergeFlag = true;
#if JVET_N0324_REGULAR_MRG_FLAG
pu.regularMergeFlag = false;
#endif
pu.mergeIdx = candIdx;
pu.mergeType = MRG_TYPE_DEFAULT_N;
pu.mvd[REF_PIC_LIST_0] = Mv();
pu.mvd[REF_PIC_LIST_1] = Mv();
pu.mvpIdx[REF_PIC_LIST_0] = NOT_VALID;
pu.mvpIdx[REF_PIC_LIST_1] = NOT_VALID;
pu.mvpNum[REF_PIC_LIST_0] = NOT_VALID;
pu.mvpNum[REF_PIC_LIST_1] = NOT_VALID;
//!< Inherit the GBi (bi-prediction weight) index from the base candidate only when it was bi-predicted (interDir == 3).
pu.cu->GBiIdx = (interDirNeighbours[fPosBaseIdx] == 3) ? GBiIdx[fPosBaseIdx] : GBI_DEFAULT;
#if JVET_N0334_MVCLIPPING
//!< Clip the derived MVs back to the MV storage bit depth.
for (int refList = 0; refList < 2; refList++)
{
if (pu.refIdx[refList] >= 0)
{
pu.mv[refList].clipToStorageBitDepth();
}
}
#endif
//!< NOTE(review): presumably restricts bi-prediction for small blocks — confirm against restrictBiPredMergeCandsOne.
PU::restrictBiPredMergeCandsOne(pu);
}
參考
JVET-K0115
JVET-L0054
JVET-N1002
感興趣的請關注微信公衆號Video Coding