以SSD各層爲例對反捲積參數進行設置

從最末尾一層到初始各層的上採樣參數設置示例:

import torch
from torch import nn
from torch.nn import init
from torch.autograd import Variable

def deconv(x, k=3, s=2, p=1, op=0):
    """Apply one transposed convolution to *x* with all-ones weights.

    The constant weight makes the layer deterministic, so it is easy to
    verify the output spatial size: out = (in - 1) * s - 2 * p + k + op.
    Channel count is preserved (out_channels == in_channels).

    Args:
        x: 4-D input tensor of shape (batch, channels, H, W).
        k: kernel size.
        s: stride (upsampling factor).
        p: padding.
        op: output_padding (new, defaults to 0 — backward compatible).

    Returns:
        The upsampled tensor produced by the transposed convolution.
    """
    c = x.size(1)  # keep the same number of channels in and out
    dconv = nn.ConvTranspose2d(in_channels=c, out_channels=c, kernel_size=k, stride=s,
                               padding=p, output_padding=op, bias=False)
    # All-ones weights: every input element contributes k*k ones to the
    # (uncropped) output, which makes sizes/sums easy to reason about.
    init.constant_(dconv.weight, 1)
    return dconv(x)

# Upsampling chain matching SSD feature-map sizes:
#   1 -> 3 -> 5 -> 10 -> 19 -> 38 -> 75 -> 150 -> 300
# Output-size formula for each step: out = (in - 1) * s - 2 * p + k.
# (Plain tensors are used directly; the torch.autograd.Variable wrapper
# has been deprecated since PyTorch 0.4 and is a no-op on tensors.)

# in (1,1) -> out (3,3)
x = torch.ones(1, 1, 1, 1)
feat1_3 = deconv(x, k=3, s=2, p=0)
print(feat1_3.size())

# in (3,3) -> out (5,5)
x = torch.ones(1, 1, 3, 3)
feat3_5 = deconv(x, k=3, s=2, p=1)
print(feat3_5.size())

# (5,5) -> (10,10)
x = torch.ones(1, 1, 5, 5)
feat5_10 = deconv(x, k=2, s=2, p=0)
print(feat5_10.size())

# (10, 10) -> (19,19)
x = torch.ones(1, 1, 10, 10)
feat10_19 = deconv(x, k=3, s=2, p=1)
print(feat10_19.size())

# (19, 19) -> (38, 38)
x = torch.ones(1, 1, 19, 19)
feat19_38 = deconv(x, k=2, s=2, p=0)
print(feat19_38.size())

# (38, 38) -> (75, 75)
x = torch.ones(1, 1, 38, 38)
feat38_75 = deconv(x, k=3, s=2, p=1)
print(feat38_75.size())

# (75, 75) -> (150, 150)
x = torch.ones(1, 1, 75, 75)
feat75_150 = deconv(x, k=2, s=2, p=0)
print(feat75_150.size())

# (150, 150) -> (300, 300)  -- fixed: comment previously claimed (150, 150),
# but k=2, s=2, p=0 doubles the size, as the variable name feat150_300 says.
x = torch.ones(1, 1, 150, 150)
feat150_300 = deconv(x, k=2, s=2, p=0)
print(feat150_300.size())

使用方法:

def _deconv(self, x, lnum):
        i = x.size(1)
        o = x.size(1)
        cfg = [(3,2,0), (3,2,1), (2,2,0), (3,2,1), (2,2,0), (3,2,1), (2,2,0), (2,2,0)] # [1,3,5,10,19,38,75,150,300]
        for k,s,p in cfg[5-lnum:]:
            dconv = nn.ConvTranspose2d(in_channels=i, out_channels=o, kernel_size=k, stride=s,
                                       padding=p, output_padding=0, bias=False)
            x = dconv(x)
            x = torch.sigmoid(x)
        return x

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章