PyTorch tensor basics (supplementary notes)

# Creating tensors
import torch
import numpy as np

# Method 1: import from numpy
# a = np.array([2,3.3])
# b = torch.from_numpy(a)
# print(b)  # tensor([2.0000, 3.3000], dtype=torch.float64)

# a = np.ones([2,3])
# b = torch.from_numpy(a)
# print(b)
"""
tensor([[1., 1., 1.],
        [1., 1., 1.]], dtype=torch.float64)
        a tensor created from a numpy array
"""

# Method 2: import from a Python list
# a = torch.tensor([2.,3.2])
# print(a) # tensor([2.0000, 3.2000])
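# torch.tensor() always takes data and infers the dtype, while the legacy
# torch.FloatTensor() constructor can also take a shape (a quick sketch):
# a = torch.tensor([2., 3.2])  # data -> tensor([2.0000, 3.2000])
# b = torch.FloatTensor(2, 3)  # shape -> an uninitialized 2x3 float tensor
# print(a, b.shape)  # ... torch.Size([2, 3])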

# rand - uniform random values in [0, 1)
# a = torch.rand(3,3)
# b = torch.rand_like(a)
# print(a)
# print(b)
"""
tensor([[0.1063, 0.1932, 0.0182],
        [0.7742, 0.0171, 0.9599],
        [0.7082, 0.4397, 0.6964]])
tensor([[0.4038, 0.0053, 0.0519],
        [0.3573, 0.3873, 0.5821],
        [0.0992, 0.3924, 0.8716]])
"""

# randint - [min, max): values between min and max, including min, excluding max
# a = torch.randint(1,10,[3,3])
# print(a)
"""
tensor([[1, 8, 6],
        [1, 1, 7],
        [6, 8, 3]])
"""


# randn - samples from the standard normal distribution (mean 0, variance 1)
# a = torch.randn(3,3)
# print(a)
"""
tensor([[ 1.2170,  0.1821,  0.1517],
        [-0.3523, -2.3274,  0.0781],
        [-0.1330, -0.4176,  2.1032]])
"""

# full - fill every element with a given value
# a = torch.full([2,3],7)  # a 2x3 tensor with every value set to 7
# print(a)
"""
tensor([[7., 7., 7.],
        [7., 7., 7.]])
"""
# b = torch.full([],7)  # an empty shape produces a scalar
# print(b)  # tensor(7.)

# c = torch.full([1],7)
# print(c)  # tensor([7.])
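# Depending on the PyTorch version, an integer fill value may give an integer
# tensor; to be explicit about the float output shown above (a small sketch):
# d = torch.full([2,3], 7.)                    # float fill value
# e = torch.full([2,3], 7, dtype=torch.float)  # or set dtype explicitly
# print(d.dtype, e.dtype)  # torch.float32 torch.float32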


# arange/range
# a = torch.arange(0,10)  # includes 0, excludes 10
# print(a)  # tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
#
# b = torch.arange(0,10,2)
# print(b)  # tensor([0, 2, 4, 6, 8])
#
# c = torch.range(0,10)  # deprecated; includes the endpoint 10, rarely used
# print(c)
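# If the endpoint is needed, arange can cover the same values without the
# deprecated call (a small sketch; note the integer dtype here):
# d = torch.arange(0, 11)
# print(d)  # tensor([ 0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10])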


# linspace/logspace
# a = torch.linspace(0,10,steps=4)  # steps=4 gives 4 evenly spaced points, including both 0 and 10
# print(a)  # tensor([ 0.0000,  3.3333,  6.6667, 10.0000])
#
# b = torch.logspace(0,-1,steps=10)  # 10 points; the exponents run from 0 to -1, i.e. 10^0 down to 10^-1
# print(b)
"""
tensor([1.0000, 0.7743, 0.5995, 0.4642, 0.3594, 0.2783, 0.2154, 0.1668, 0.1292,
        0.1000])
"""


# ones/zeros/eye
# a = torch.ones(3,3)
# print(a)
"""
tensor([[1., 1., 1.],
        [1., 1., 1.],
        [1., 1., 1.]])
"""

# b = torch.zeros(3,3)
# print(b)
"""
tensor([[0., 0., 0.],
        [0., 0., 0.],
        [0., 0., 0.]])
"""

# c = torch.eye(3,4)  # ones on the diagonal, zeros elsewhere
# print(c)
"""
tensor([[1., 0., 0., 0.],
        [0., 1., 0., 0.],
        [0., 0., 1., 0.]])

"""
# d = torch.eye(3)
# print(d)
"""
tensor([[1., 0., 0.],
        [0., 1., 0.],
        [0., 0., 1.]])
"""


# randperm - random permutation (shuffle)
# a = torch.rand(2,3)
# b = torch.rand(2,2)
# idx = torch.randperm(2)
# print(idx)  # tensor([1, 0]) or tensor([0, 1]); the order is random
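# A common use of randperm is shuffling two tensors with the same random order
# (a minimal sketch using a, b and idx from above):
# print(a[idx])  # rows of a in the shuffled order
# print(b[idx])  # rows of b shuffled consistently with a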


# Indexing and slicing
# 1. indexing
# a = torch.rand(4,3,28,28)
# print(a[0].shape)  # torch.Size([3, 28, 28])
# print(a[0,0].shape)  # torch.Size([28, 28])
# print(a[0,0,2,4])  # tensor(0.5293)

# 2. select first/last N
# print(a.shape)  # torch.Size([4, 3, 28, 28])
# print(a[:2].shape)  # torch.Size([2, 3, 28, 28]), take the first two images
# print(a[:2,:1,:,:].shape)  # first channel of the first two images  # torch.Size([2, 1, 28, 28])

# 3. select by steps
# print(a[:,:,0:28:2,0:28:2].shape)  # torch.Size([4, 3, 14, 14])
# print(a[:,:,::2,::2].shape)  # torch.Size([4, 3, 14, 14])

# 4. select by specific index
# print(a.index_select(0,torch.tensor([0,2])).shape)  # torch.Size([2, 3, 28, 28]); pick indices 0 and 2 along dim 0, other dims unchanged
# print(a.index_select(2,torch.arange(28)).shape)  # torch.Size([4, 3, 28, 28]); the 2 means the third dimension
# print(a.index_select(2,torch.arange(8)).shape)  # torch.Size([4, 3, 8, 28])

# 5. ... (the ellipsis stands for all remaining dimensions)
# print(a[...].shape)  # torch.Size([4, 3, 28, 28])
# print(a[0,...].shape)  # torch.Size([3, 28, 28])
# print(a[:,1,...].shape)  # torch.Size([4, 28, 28])

# 6. select by mask
# x = torch.randn(3,4)
# print(x)
"""
tensor([[ 0.0429, -0.2658, -1.3787, -1.2532],
        [-0.6789, -0.7480, -1.0623, -0.4357],
        [-0.4431, -1.3296, -0.8147,  0.8860]])
"""

# mask = x.ge(0.5)  # elements >= 0.5 are True
# print(mask)
"""
tensor([[False, False, False, False],
        [False, False, False, False],
        [False, False, False,  True]])
"""
# print(torch.masked_select(x,mask))  # tensor([0.8860]); pulls out the elements >= 0.5 as a flat 1-D tensor
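# Boolean indexing is equivalent and also returns a flattened 1-D result
# (a quick sketch):
# print(x[mask])  # same values as torch.masked_select(x, mask)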



# Dimension transformations
# 1. view / reshape
# a = torch.rand(4,1,28,28)
# print(a.shape)
"""
torch.Size([4, 1, 28, 28])
"""
# print(a.view(4,28*28))
"""
tensor([[0.3209, 0.7425, 0.7396,  ..., 0.5740, 0.3687, 0.4657],
        [0.4059, 0.9838, 0.0928,  ..., 0.3335, 0.5015, 0.0125],
        [0.5801, 0.3556, 0.0628,  ..., 0.2922, 0.8510, 0.9451],
        [0.0798, 0.6093, 0.7713,  ..., 0.6038, 0.7236, 0.1421]])

"""
# print(a.view(4,28*28).shape)  # torch.Size([4, 784])
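# view only works on contiguous memory, while reshape falls back to copying
# when needed (a minimal sketch):
# a_t = a.transpose(1,3)           # non-contiguous after transpose
# print(a_t.reshape(4,-1).shape)   # torch.Size([4, 784]); reshape copies if it has to
# print(a_t.contiguous().view(4,-1).shape)  # view needs contiguous() first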


# 2. squeeze vs unsqueeze
# a = torch.rand(4,1,28,28)
# print(a.unsqueeze(0).shape)  # torch.Size([1, 4, 1, 28, 28]); a new dim is inserted before dim 0
#
# print(a.unsqueeze(-1).shape)  # append a dim at the end  # torch.Size([4, 1, 28, 28, 1])
# print(a.unsqueeze(4).shape)  # insert at index 4  # torch.Size([4, 1, 28, 28, 1])
# print(a.unsqueeze(-4).shape)  # negative indices count from the end  # torch.Size([4, 1, 1, 28, 28])
# print(a.unsqueeze(-5).shape)  # torch.Size([1, 4, 1, 28, 28])
# print(a.unsqueeze(5).shape)  # error: dim out of range, valid range is [-5, 4]

# Example
# b = torch.rand(32)
# f = torch.rand(4,32,14,14)
# b = b.unsqueeze(1).unsqueeze(2).unsqueeze(0)
# print(b.shape)  # torch.Size([1, 32, 1, 1])

# squeeze
# print(b.squeeze().shape)  # torch.Size([32]); with no argument, every dim of size 1 is squeezed
# print(b.squeeze(0).shape)  # torch.Size([32, 1, 1])
# print(b.squeeze(-1).shape)  # torch.Size([1, 32, 1])
# print(b.squeeze(1).shape)  # torch.Size([1, 32, 1, 1]); dim 1 has size 32, not 1, so nothing is squeezed
# print(b.squeeze(-4).shape)  # torch.Size([32, 1, 1])


# 3. expand / repeat
# a = torch.rand(4,32,14,14)
# b = torch.rand(1,32,1,1)
# print(b.expand(4,32,14,14).shape)  # torch.Size([4, 32, 14, 14])
# print(b.expand(-1,32,-1,-1).shape)  # torch.Size([1, 32, 1, 1]); -1 keeps that dim unchanged
# print(b.expand(-1,32,-1,-4).shape)  # torch.Size([1, 32, 1, -4]); buggy: only -1 is a valid placeholder

# repeat
# print(b.repeat(4,32,1,1).shape)  # torch.Size([4, 1024, 1, 1]); the arguments to repeat are how many times to copy along each dim
# print(b.repeat(4,1,1,1).shape)  # torch.Size([4, 32, 1, 1])
# print(b.repeat(4,1,32,32).shape)  # torch.Size([4, 32, 32, 32])
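# expand does not allocate new memory (expanded dims get stride 0), while
# repeat physically copies the data (a small sketch):
# print(b.expand(4,32,14,14).stride())  # (0, 1, 0, 0): stride 0 in the expanded dims
# print(b.repeat(4,1,14,14).shape)      # torch.Size([4, 32, 14, 14]), a real copy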

# 4. .t()
# a = torch.randn(3,4)
# print(a)
"""
tensor([[-0.0587, -0.8892, -0.8047, -0.6415],
        [-0.7105, -0.5036,  0.2471, -0.3749],
        [ 0.2108,  0.2228,  0.8637, -0.2582]])
"""
# print(a.t())
"""
tensor([[-0.0587, -0.7105,  0.2108],
        [-0.8892, -0.5036,  0.2228],
        [-0.8047,  0.2471,  0.8637],
        [-0.6415, -0.3749, -0.2582]])
"""
# Note: .t() only works on 2-D matrices, not on 4-D tensors

# 5. transpose
# a = torch.rand(4,3,32,32)
# # a1 = a.transpose(1,3).view(4,3*32*32).view(4,3,32,32)  # error: view() needs contiguous memory after transpose
# a1 = a.transpose(1,3).contiguous().view(4,3*32*32).view(4,3,32,32)
# a2 = a.transpose(1,3).contiguous().view(4,3*32*32).view(4,32,32,3).transpose(1,3)
# print(a1.shape)  # torch.Size([4, 3, 32, 32])
# print(a2.shape)  # torch.Size([4, 3, 32, 32])
#
# print(torch.all(torch.eq(a,a1)))  # tensor(False)
# print(torch.all(torch.eq(a,a2)))  # tensor(True)


# 6. permute
a = torch.rand(4,3,28,28)
print(a.transpose(1,3).shape)  # torch.Size([4, 28, 28, 3])

b = torch.rand(4,3,28,28)
print(b.transpose(1,3).shape)  # torch.Size([4, 28, 28, 3])

print(b.transpose(1,3).transpose(1,2).shape)  # torch.Size([4, 28, 28, 3])

print(b.permute(0,2,3,1).shape)  # torch.Size([4, 28, 28, 3])
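# Like transpose, permute returns a non-contiguous view, so contiguous() is
# needed before view() (a minimal sketch):
print(b.permute(0,2,3,1).contiguous().view(4, -1).shape)  # torch.Size([4, 2352])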
