PyTorch model - optimizer - state_dict() - torch.save(config, save_path) - torch.load(load_path)

This note walks through three examples: printing a model's state_dict(), printing an optimizer's state_dict(), and saving a checkpoint dictionary with torch.save(config, save_path) so it can later be restored with torch.load(load_path).

1. state_dict()

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# yongqiang cheng

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim


# Define model
class TheModelClass(nn.Module):
    """Small LeNet-style CNN used to demonstrate state_dict().

    Expects 3-channel input (e.g. 32x32 images, which yield the
    16 * 5 * 5 = 400 features consumed by fc1) and produces 10 logits.
    """

    def __init__(self):
        super(TheModelClass, self).__init__()
        # Two conv stages, each followed by 2x2 max pooling in forward().
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        # Classifier head: 400 -> 120 -> 84 -> 10.
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = self.pool(out)
        out = F.relu(self.conv2(out))
        out = self.pool(out)
        # Flatten all feature maps into a (batch, 400) matrix.
        out = out.view(-1, 16 * 5 * 5)
        out = F.relu(self.fc1(out))
        out = F.relu(self.fc2(out))
        return self.fc3(out)


# Build the network and an SGD optimizer over its parameters.
model = TheModelClass()

optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)

# A model's state_dict maps each parameter/buffer name to its tensor;
# only the shapes are shown here.
print("Model's state_dict:")
for name, tensor in model.state_dict().items():
    print(name, "\t", tensor.size())

# An optimizer's state_dict holds its internal state plus the
# hyper-parameters of every param group.
print("Optimizer's state_dict:")
for key, value in optimizer.state_dict().items():
    print(key, "\t", value)

/home/yongqiang/miniconda3/envs/pt-1.4_py-3.6/bin/python /home/yongqiang/pytorch_work/end2end-asr-pytorch-example/yongqiang.py
Model's state_dict:
conv1.weight 	 torch.Size([6, 3, 5, 5])
conv1.bias 	 torch.Size([6])
conv2.weight 	 torch.Size([16, 6, 5, 5])
conv2.bias 	 torch.Size([16])
fc1.weight 	 torch.Size([120, 400])
fc1.bias 	 torch.Size([120])
fc2.weight 	 torch.Size([84, 120])
fc2.bias 	 torch.Size([84])
fc3.weight 	 torch.Size([10, 84])
fc3.bias 	 torch.Size([10])
Optimizer's state_dict:
state 	 {}
param_groups 	 [{'lr': 0.001, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [140726061376352, 140726040176608, 140726040176680, 140726040176752, 140726040176824, 140726040176896, 140726040176968, 140726040177040, 140726040177112, 140726040177184]}]

Process finished with exit code 0

2. state_dict()

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# yongqiang cheng

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim


# Define model
class TheModelClass(nn.Module):
    """Small LeNet-style CNN used to demonstrate state_dict().

    Expects 3-channel input (e.g. 32x32 images, which yield the
    16 * 5 * 5 = 400 features consumed by fc1) and produces 10 logits.
    """

    def __init__(self):
        super(TheModelClass, self).__init__()
        # Two conv stages, each followed by 2x2 max pooling in forward().
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        # Classifier head: 400 -> 120 -> 84 -> 10.
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = self.pool(out)
        out = F.relu(self.conv2(out))
        out = self.pool(out)
        # Flatten all feature maps into a (batch, 400) matrix.
        out = out.view(-1, 16 * 5 * 5)
        out = F.relu(self.fc1(out))
        out = F.relu(self.fc2(out))
        return self.fc3(out)


# Build the network and an SGD optimizer over its parameters.
model = TheModelClass()

optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)

# First pass: show only parameter names and shapes.
print("Model's state_dict:")
for name, tensor in model.state_dict().items():
    print(name, "\t", tensor.size())

# The optimizer's state_dict holds its internal state plus the
# hyper-parameters of every param group.
print("Optimizer's state_dict:")
for key, value in optimizer.state_dict().items():
    print(key, "\t", value)

# Second pass: dump the full tensor values, one parameter per entry.
print("Model's state_dict:")
for name, tensor in model.state_dict().items():
    print(name, "\n", tensor)

/home/yongqiang/miniconda3/envs/pt-1.4_py-3.6/bin/python /home/yongqiang/pytorch_work/end2end-asr-pytorch-example/yongqiang.py
Model's state_dict:
conv1.weight 	 torch.Size([6, 3, 5, 5])
conv1.bias 	 torch.Size([6])
conv2.weight 	 torch.Size([16, 6, 5, 5])
conv2.bias 	 torch.Size([16])
fc1.weight 	 torch.Size([120, 400])
fc1.bias 	 torch.Size([120])
fc2.weight 	 torch.Size([84, 120])
fc2.bias 	 torch.Size([84])
fc3.weight 	 torch.Size([10, 84])
fc3.bias 	 torch.Size([10])
Optimizer's state_dict:
state 	 {}
param_groups 	 [{'lr': 0.001, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [140109876585384, 140109855434792, 140109855434864, 140109855434936, 140109855435008, 140109855435080, 140109855435152, 140109855435224, 140109855435296, 140109855435368]}]
Model's state_dict:
conv1.weight 
......
conv1.bias 
 tensor([-0.0674, -0.1050, -0.0501, -0.0006, -0.0412,  0.0723])
conv2.weight 
......
conv2.bias 
 tensor([-0.0759, -0.0430, -0.0360,  0.0326, -0.0434, -0.0319, -0.0762, -0.0777,
        -0.0670,  0.0813, -0.0459, -0.0032,  0.0653, -0.0217, -0.0804, -0.0423])
fc1.weight 
 tensor([[-0.0215,  0.0353,  0.0108,  ...,  0.0245, -0.0217,  0.0306],
        [-0.0069, -0.0206, -0.0316,  ..., -0.0325,  0.0455, -0.0178],
        [ 0.0082,  0.0180,  0.0067,  ..., -0.0385,  0.0237,  0.0232],
        ...,
        [ 0.0438, -0.0409, -0.0337,  ...,  0.0358, -0.0055,  0.0378],
        [ 0.0077, -0.0468,  0.0162,  ..., -0.0433, -0.0359, -0.0240],
        [-0.0498,  0.0463, -0.0128,  ..., -0.0427,  0.0169,  0.0093]])
fc1.bias 
......
fc2.weight 
 tensor([[ 0.0489,  0.0608,  0.0596,  ..., -0.0331, -0.0158,  0.0263],
        [-0.0729, -0.0118, -0.0794,  ...,  0.0427, -0.0092, -0.0524],
        [-0.0814,  0.0552,  0.0365,  ...,  0.0676,  0.0044,  0.0455],
        ...,
        [ 0.0636,  0.0371, -0.0887,  ..., -0.0207, -0.0367, -0.0761],
        [-0.0584,  0.0579, -0.0076,  ...,  0.0863, -0.0167, -0.0223],
        [ 0.0247, -0.0500, -0.0751,  ..., -0.0557, -0.0673,  0.0164]])
fc2.bias 
......
fc3.weight 
......
fc3.bias 
 tensor([ 0.0501,  0.0460, -0.1056, -0.0683,  0.0583, -0.0780,  0.0997,  0.0550,
         0.0777, -0.0156])

Process finished with exit code 0

3. torch.save(config, save_path)

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# yongqiang cheng

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim


def save_model(model, epoch, optimizer, save_folder, name, loss, best_model=False):
    """Save a training checkpoint to <save_folder>/<name>/.

    Args:
        model: module whose state_dict() is stored in the checkpoint.
        epoch: current epoch number; used in the file name and stored.
        optimizer: optimizer whose state_dict() is stored.
        save_folder: root directory for checkpoints.
        name: experiment sub-directory name under save_folder.
        loss: "ce" or "ctc"; selects which optimizer hyper-parameters are
            recorded. Any other value prints a message and saves nothing.
        best_model: if True, write best_model.th instead of epoch_<n>.th.
    """
    target_dir = os.path.join(save_folder, name)
    if best_model:
        save_path = os.path.join(target_dir, "best_model.th")
    else:
        save_path = os.path.join(target_dir, "epoch_{}.th".format(epoch))

    # exist_ok avoids the check-then-create race of the original code.
    os.makedirs(target_dir, exist_ok=True)

    print("SAVE MODEL to", save_path)
    if loss == "ce":
        config = {
            'label2id': "label2id",
            'id2label': "id2label",
            'args': "args",
            # Store the actual epoch (the original "ce" branch saved the
            # literal string "epoch", unlike the "ctc" branch).
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'optimizer_params': {
                '_step': "_step",
                '_rate': "_rate",
                'warmup': "warmup",
                'factor': "factor",
                'model_size': "model_size"
            },
            'metrics': "metrics"
        }
    elif loss == "ctc":
        config = {
            'label2id': "label2id",
            'id2label': "id2label",
            'args': "args",
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'optimizer_params': {
                'lr': "lr",
                'lr_anneal': "lr_anneal"
            },
            'metrics': "metrics"
        }
    else:
        # The original fell through here and crashed with NameError on
        # torch.save(config, ...); bail out explicitly instead.
        print("Loss is not defined")
        return

    torch.save(config, save_path)


# Define model
class TheModelClass(nn.Module):
    """Small LeNet-style CNN used to demonstrate checkpoint saving.

    Expects 3-channel input (e.g. 32x32 images, which yield the
    16 * 5 * 5 = 400 features consumed by fc1) and produces 10 logits.
    """

    def __init__(self):
        super(TheModelClass, self).__init__()
        # Two conv stages, each followed by 2x2 max pooling in forward().
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        # Classifier head: 400 -> 120 -> 84 -> 10.
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = self.pool(out)
        out = F.relu(self.conv2(out))
        out = self.pool(out)
        # Flatten all feature maps into a (batch, 400) matrix.
        out = out.view(-1, 16 * 5 * 5)
        out = F.relu(self.fc1(out))
        out = F.relu(self.fc2(out))
        return self.fc3(out)


# Build the network and an SGD optimizer over its parameters.
model = TheModelClass()

optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)

# Show parameter names and shapes from the model's state_dict.
print("Model's state_dict:")
for name, tensor in model.state_dict().items():
    print(name, "\t", tensor.size())

# Show the optimizer's internal state and param-group hyper-parameters.
print("Optimizer's state_dict:")
for key, value in optimizer.state_dict().items():
    print(key, "\t", value)

# Dump the full tensor values, one parameter per entry.
print("Model's state_dict:")
for name, tensor in model.state_dict().items():
    print(name, "\n", tensor)

# Write an epoch checkpoint under <model_folder>/log/.
model_folder = "/home/yongqiang/pytorch_work/end2end-asr-pytorch-example"
save_model(model=model, epoch=9, optimizer=optimizer, save_folder=model_folder, name="log", loss="ctc", best_model=False)

/home/yongqiang/miniconda3/envs/pt-1.4_py-3.6/bin/python /home/yongqiang/pytorch_work/end2end-asr-pytorch-example/yongqiang.py
Model's state_dict:
conv1.weight 	 torch.Size([6, 3, 5, 5])
conv1.bias 	 torch.Size([6])
conv2.weight 	 torch.Size([16, 6, 5, 5])
conv2.bias 	 torch.Size([16])
fc1.weight 	 torch.Size([120, 400])
fc1.bias 	 torch.Size([120])
fc2.weight 	 torch.Size([84, 120])
fc2.bias 	 torch.Size([84])
fc3.weight 	 torch.Size([10, 84])
fc3.bias 	 torch.Size([10])
Optimizer's state_dict:
state 	 {}
param_groups 	 [{'lr': 0.001, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [139858574714200, 139858553707328, 139858553707400, 139858553707472, 139858553707544, 139858553707616, 139858553707688, 139858553707760, 139858553707832, 139858553707904]}]
Model's state_dict:
conv1.weight 
 tensor([[[[ 0.0134, -0.0569, -0.0762, -0.0897, -0.0430],
          [ 0.0423, -0.0034, -0.0318,  0.0950, -0.0198],
          [ 0.0963,  0.0397, -0.0560,  0.0744,  0.0987],
          [ 0.0997,  0.0042, -0.0353, -0.0677,  0.0536],
          [ 0.0791,  0.0338, -0.0949, -0.0367,  0.0982]],

         [[ 0.0951,  0.0567,  0.1046, -0.0704, -0.0307],
          [ 0.0991,  0.0478,  0.0778,  0.0336,  0.0927],
          [ 0.0694,  0.0703,  0.1084,  0.1068,  0.0833],
          [-0.0444,  0.0951,  0.0369, -0.1047,  0.0575],
          [ 0.0866,  0.1128,  0.0547, -0.0558,  0.0164]],

         [[ 0.0562, -0.1008, -0.0253,  0.0131,  0.0880],
          [-0.0422, -0.0437,  0.0181, -0.0247, -0.0727],
          [-0.0072,  0.0113,  0.0032,  0.0571,  0.0431],
          [-0.0011, -0.0778,  0.0533,  0.0441,  0.0252],
          [-0.0267,  0.0294, -0.0218, -0.1044, -0.0390]]],


        [[[ 0.0848,  0.1063, -0.0568, -0.0719,  0.0082],
          [-0.0797,  0.0945, -0.0328, -0.0290, -0.0759],
          [ 0.0794, -0.0985, -0.0748, -0.0041,  0.0019],
          [-0.0375, -0.0602, -0.0819, -0.1079,  0.0773],
          [ 0.0944, -0.0515, -0.0621,  0.0124,  0.0627]],

         [[ 0.0582, -0.1018,  0.0449, -0.0540,  0.0171],
          [-0.0551,  0.1112, -0.0233, -0.1066,  0.0431],
          [ 0.0936, -0.1010, -0.0952, -0.0989, -0.0224],
          [-0.0374, -0.0663, -0.1145, -0.0551, -0.0156],
          [-0.0178, -0.0025, -0.0656, -0.0878,  0.0368]],

         [[ 0.0199, -0.0584,  0.0842, -0.1144, -0.0983],
          [ 0.0355,  0.0959, -0.0010, -0.0964,  0.0487],
          [-0.0616, -0.0055,  0.0847,  0.0304, -0.0560],
          [ 0.0960, -0.1062,  0.1005, -0.0658, -0.0791],
          [ 0.0265, -0.0321, -0.0826, -0.1112, -0.0275]]],


        [[[-0.1136, -0.0062, -0.0536, -0.0138, -0.1113],
          [ 0.0939,  0.0356, -0.0073,  0.0578,  0.0617],
          [ 0.0470,  0.0967, -0.0801,  0.1070,  0.0272],
          [ 0.0662,  0.0654,  0.0242,  0.1028, -0.0372],
          [ 0.0699,  0.0887,  0.1111,  0.0259, -0.0816]],

         [[ 0.0348,  0.0764,  0.0776, -0.0557,  0.0993],
          [ 0.0266, -0.0372, -0.0937,  0.0853, -0.0370],
          [ 0.0783, -0.0296,  0.0722, -0.1091, -0.1092],
          [ 0.1108,  0.1095, -0.0578,  0.0673,  0.0882],
          [ 0.0088, -0.0430,  0.0211,  0.1035, -0.0614]],

         [[ 0.0309,  0.0961, -0.0820,  0.0557,  0.1115],
          [ 0.0167,  0.0800,  0.0058,  0.1004, -0.0041],
          [-0.0032, -0.0768,  0.0156,  0.0554,  0.0967],
          [-0.0623, -0.0133,  0.0419,  0.0855, -0.0093],
          [-0.0782,  0.0276, -0.0715, -0.0109,  0.0090]]],


        [[[-0.0351,  0.0579, -0.0033,  0.0753,  0.0762],
          [ 0.0846,  0.0981, -0.0195, -0.0937,  0.0385],
          [-0.0608, -0.0624,  0.0757,  0.0834, -0.0371],
          [ 0.0948, -0.0822, -0.1009,  0.1132, -0.1114],
          [ 0.0245,  0.0276,  0.0120, -0.0866,  0.0686]],

         [[ 0.0277,  0.0420,  0.0820,  0.0180, -0.0904],
          [-0.0340,  0.0228,  0.1026,  0.0177,  0.0052],
          [-0.0608, -0.0869, -0.1016,  0.0253,  0.1009],
          [-0.0366, -0.1122,  0.0936, -0.0223, -0.0959],
          [-0.0824,  0.1027,  0.0007, -0.0691, -0.0679]],

         [[-0.0165, -0.0798,  0.0052, -0.0003, -0.1124],
          [-0.0151,  0.0668,  0.1082, -0.0442, -0.1077],
          [-0.0055, -0.0172,  0.0627,  0.0263, -0.0847],
          [ 0.0741,  0.1126,  0.0694, -0.0641, -0.0290],
          [-0.0695, -0.0944,  0.0760,  0.1055,  0.1067]]],


        [[[-0.0462, -0.0732, -0.0339, -0.0569,  0.0344],
          [ 0.0217, -0.0960, -0.1135, -0.0820,  0.0353],
          [ 0.0226,  0.0066,  0.0191,  0.0522,  0.0986],
          [-0.0946,  0.0257, -0.0353,  0.0866, -0.0039],
          [ 0.0354, -0.0436, -0.0857,  0.0711,  0.0421]],

         [[-0.0715,  0.0448, -0.0478,  0.0755, -0.0578],
          [ 0.1127, -0.0526, -0.1046,  0.0710, -0.0189],
          [-0.0079,  0.1007, -0.0464, -0.0967,  0.0137],
          [ 0.0587, -0.0915,  0.0212,  0.0212,  0.0259],
          [ 0.0572,  0.0283,  0.0013,  0.1119, -0.0051]],

         [[ 0.0268,  0.0909, -0.0809, -0.0398,  0.1101],
          [ 0.1020,  0.0993,  0.0176, -0.0125, -0.0770],
          [ 0.0786, -0.1061,  0.0761, -0.0004,  0.0296],
          [-0.0290, -0.0337,  0.0569, -0.0355, -0.0754],
          [ 0.0760, -0.0028,  0.0969, -0.0407,  0.0886]]],


        [[[ 0.0132, -0.0045, -0.0614,  0.0071, -0.1035],
          [ 0.0208,  0.0518, -0.1080, -0.0129, -0.0524],
          [-0.0328,  0.0093, -0.0668,  0.0273,  0.0307],
          [ 0.0311,  0.0798,  0.0596, -0.0256, -0.0979],
          [-0.0542, -0.0553, -0.0613, -0.0782, -0.0579]],

         [[-0.0875, -0.0210,  0.0966, -0.0768, -0.0592],
          [ 0.0897,  0.0828,  0.0343,  0.0133, -0.0669],
          [-0.0986, -0.0866, -0.1075,  0.0613,  0.0486],
          [-0.1153,  0.1053,  0.0163, -0.0813, -0.0268],
          [-0.0267,  0.1103, -0.0289, -0.0212, -0.0427]],

         [[-0.0471, -0.0681,  0.0008,  0.0685, -0.0726],
          [-0.0161, -0.0378,  0.0142, -0.0448, -0.0542],
          [-0.0594, -0.1057, -0.0388,  0.0282, -0.0045],
          [-0.0942,  0.1069, -0.0601, -0.1092,  0.0658],
          [ 0.0058, -0.0673, -0.0139,  0.0544,  0.0266]]]])
conv1.bias 
 tensor([-0.0518,  0.0757, -0.0106,  0.0676, -0.0081,  0.0287])
conv2.weight 
 tensor([[[[ 3.7345e-02,  3.6917e-02,  7.1036e-04, -1.8674e-02, -2.0425e-02],
          [ 6.0888e-02,  3.9115e-02,  6.0703e-02, -4.9426e-02,  2.0759e-03],
          [ 4.4816e-02,  2.4605e-02,  7.5611e-02,  7.0319e-02, -4.8377e-03],
          [-5.8067e-02,  2.4481e-02, -7.9937e-02, -6.9166e-02,  4.6737e-02],
          [-1.4972e-02, -7.3878e-02, -7.6790e-02, -1.9032e-02, -1.9925e-02]],

         [[ 2.2720e-02, -2.5971e-02, -6.3477e-02,  1.7530e-02,  2.5905e-02],
          [-6.4675e-02, -6.6284e-02, -8.0954e-02,  3.6396e-02, -6.3034e-02],
          [-1.8645e-03,  4.3630e-02, -2.6132e-02, -7.6036e-02,  7.8512e-02],
          [ 4.3217e-02, -3.1471e-02, -9.8552e-03,  3.9861e-02, -2.3391e-02],
          [ 2.8649e-02,  5.9830e-02,  2.7637e-02,  1.8659e-02, -5.6407e-02]],

         [[-4.5381e-02,  5.4505e-03,  4.0909e-03,  5.6494e-02, -5.9140e-02],
          [ 2.3393e-02, -3.3347e-02, -6.6707e-02,  1.9643e-02, -2.0795e-02],
          [-7.7018e-02,  3.0887e-02, -1.8568e-02,  6.2216e-02,  1.9621e-02],
          [ 3.4469e-02, -8.1499e-03,  3.7170e-03, -4.2050e-02, -1.1584e-02],
          [ 1.5811e-02,  7.1654e-02, -6.3917e-02,  2.9590e-02,  2.9235e-02]],

         [[ 3.9611e-02, -2.3142e-02,  3.7166e-02, -3.2922e-02, -7.0504e-02],
          [-5.1643e-03,  7.2051e-02,  4.7072e-02,  4.3785e-02,  3.0908e-02],
          [ 4.5853e-02, -5.6516e-02, -7.1385e-02,  4.2695e-02,  2.6823e-02],
          [-3.3464e-03, -5.1895e-02, -5.9452e-02, -1.4120e-02, -4.2594e-02],
          [-5.9216e-03, -7.9317e-02,  3.2249e-02,  2.3419e-02,  1.3252e-02]],

         [[ 1.1583e-02,  7.0880e-02,  1.9811e-02,  8.0881e-02, -4.4006e-02],
          [ 3.5248e-02, -2.2885e-02,  2.7275e-02,  4.8847e-02, -2.0026e-02],
          [ 3.0439e-02,  7.2296e-02,  7.4953e-02,  4.2624e-02,  3.7546e-02],
          [ 6.0431e-02, -7.8233e-02, -2.1510e-02,  7.9771e-02,  2.0746e-02],
          [-6.6449e-02,  8.1094e-02,  1.7321e-03,  9.2274e-03,  4.6537e-02]],

         [[-4.3946e-02,  3.4462e-02,  4.2152e-02, -3.2618e-02, -3.1919e-02],
          [ 3.7446e-02, -1.6665e-02, -4.6682e-02, -4.2671e-02,  3.8549e-02],
          [ 2.0830e-02,  3.1240e-02,  1.4524e-02,  6.9791e-02,  6.4641e-02],
          [-5.1961e-02,  2.9224e-02, -8.1169e-02,  5.0892e-02,  7.3306e-02],
          [-5.3219e-02,  4.4223e-02,  1.6751e-02,  1.0149e-02,  2.7877e-02]]],


        [[[ 4.0394e-02, -7.5706e-02,  4.0839e-02,  4.1527e-02,  7.5264e-02],
          [ 5.0450e-02,  2.5265e-02,  5.6446e-02, -9.1349e-03, -2.0079e-02],
          [ 4.0942e-03,  1.9982e-02,  7.8088e-02,  1.3781e-02,  5.9151e-02],
          [ 1.5582e-03, -7.1641e-02, -3.9163e-02,  2.0775e-02,  6.9971e-04],
          [ 1.4160e-02, -2.1367e-02,  7.2005e-02,  3.0974e-02,  5.5173e-02]],

         [[ 2.6252e-02,  2.1418e-02, -7.9839e-03, -6.4778e-02, -1.7517e-02],
          [ 3.0320e-02, -7.2231e-03, -6.9592e-02, -2.1982e-02, -7.9040e-02],
          [-6.8035e-02, -4.2568e-02,  6.6144e-03,  3.2699e-02,  4.0103e-03],
          [-7.6126e-02,  6.6878e-02,  2.0500e-02,  7.3015e-02,  4.4364e-02],
          [ 6.4838e-02, -6.1012e-02, -3.7262e-02, -2.4985e-02,  4.3699e-02]],

         [[ 2.9414e-02, -6.4046e-02, -7.9652e-02,  5.7636e-02,  1.8768e-02],
          [ 1.3999e-02,  6.1029e-04, -2.5749e-02,  5.8838e-02, -7.4751e-02],
          [ 6.9579e-02,  4.0549e-02,  2.6570e-02,  1.9762e-02,  6.2969e-02],
          [-3.0153e-02,  4.1174e-02, -1.4508e-02, -7.5177e-02,  6.0151e-02],
          [ 3.5434e-02, -3.4538e-02,  9.2284e-04, -6.9311e-02,  5.5276e-02]],

         [[-1.1414e-02,  5.8701e-02, -8.5818e-03,  5.3662e-02, -6.6120e-02],
          [-6.6017e-02, -7.2396e-02,  3.9557e-02, -3.2917e-02,  2.3871e-02],
          [ 3.6651e-02, -5.7215e-02,  7.5414e-02, -2.8045e-02, -5.4822e-03],
          [ 6.6979e-03,  4.3628e-02, -7.9500e-02,  1.6581e-02, -2.0443e-03],
          [ 4.8128e-02, -2.5281e-03,  6.3143e-02,  5.5622e-02,  5.0823e-02]],

         [[ 2.6724e-02, -4.4536e-02,  3.8301e-02, -1.6428e-02,  7.6386e-03],
          [ 6.9414e-02,  6.4933e-02, -6.0955e-02,  4.2710e-02,  6.7609e-02],
          [ 3.2155e-02, -3.9059e-02, -8.1267e-02,  6.0943e-02,  7.8761e-02],
          [ 9.2058e-03, -6.8716e-02,  5.9567e-02, -4.7019e-02,  4.3247e-02],
          [-8.0727e-02, -7.7734e-02,  6.4857e-02,  1.4899e-03,  4.7743e-02]],

         [[ 4.5334e-02,  1.9591e-02,  4.8368e-02, -7.7771e-02, -2.2575e-02],
          [ 2.3952e-02, -7.0499e-02, -6.3645e-02, -1.8650e-02, -2.2908e-02],
          [ 4.0042e-02,  5.1475e-02, -4.9496e-02, -7.1370e-02, -7.3195e-02],
          [ 7.5049e-02, -5.0792e-02,  7.1668e-02,  2.3936e-02,  7.4133e-02],
          [ 7.1541e-03,  3.1869e-02, -7.6662e-02, -1.1101e-02, -6.0137e-02]]],


        [[[-3.1704e-02, -6.3101e-02,  5.3376e-02, -7.5087e-02,  7.4585e-02],
          [ 3.3090e-03, -1.2829e-02,  4.2248e-03, -3.8829e-02,  2.2651e-02],
          [-3.0851e-02,  6.0922e-02,  6.3309e-02, -1.0301e-02, -3.4664e-04],
          [ 3.7358e-03,  5.7330e-02,  7.6840e-02,  4.9586e-02, -5.9538e-02],
          [ 3.8326e-02, -6.2273e-02,  3.8199e-02,  9.8926e-03, -6.3158e-02]],

         [[ 3.4346e-02, -3.0442e-02,  9.5122e-03,  5.9389e-03,  6.4301e-02],
          [ 3.7045e-02, -1.4779e-02,  5.3899e-02,  1.2880e-02,  3.2471e-02],
          [-4.9418e-02, -7.3648e-02, -1.2379e-02,  6.0696e-02, -4.2189e-03],
          [-5.9245e-03, -2.1985e-02,  3.4781e-02,  6.4471e-02, -6.9246e-02],
          [-5.3104e-02,  4.7656e-02, -4.7472e-02,  5.0731e-02,  6.2965e-02]],

         [[-7.3520e-02, -2.9047e-02,  2.4189e-02,  9.0452e-03,  3.1695e-02],
          [-5.4734e-02,  2.8641e-02, -6.1692e-02, -2.3195e-02,  1.1079e-02],
          [ 5.4220e-02, -7.6615e-02,  5.6999e-02,  2.3648e-03, -4.8024e-02],
          [ 2.4688e-02,  2.6944e-02, -5.6788e-02,  4.2183e-02, -2.4088e-02],
          [ 5.4214e-03, -1.7575e-02,  6.7586e-02,  5.1225e-02,  2.3619e-02]],

         [[-2.5996e-03, -7.9755e-02,  6.8051e-02,  7.2919e-02,  7.9731e-02],
          [-5.1041e-02, -2.0511e-02,  4.2875e-02, -3.9107e-02,  7.6118e-02],
          [ 9.0300e-03,  3.5622e-02,  1.0749e-02, -1.6854e-02, -5.9495e-02],
          [ 3.1847e-02,  7.7569e-02,  3.8437e-02, -3.2095e-02,  8.4368e-03],
          [ 2.2344e-02,  5.6201e-02,  4.6436e-02, -6.0905e-02, -6.2908e-02]],

         [[ 1.1728e-02, -2.6272e-03, -3.3887e-02, -4.3492e-02,  1.4497e-02],
          [ 3.9038e-02, -6.3376e-02, -2.3013e-02, -4.0424e-02,  1.4657e-02],
          [ 4.9225e-03, -3.3368e-02,  1.8460e-02, -3.6485e-02, -6.4865e-02],
          [-6.4731e-05, -6.8108e-02, -1.3410e-02, -3.4235e-02, -6.4206e-02],
          [-1.7974e-02,  3.3237e-02,  5.5293e-02, -3.7493e-02,  2.4666e-02]],

         [[ 6.7512e-02, -8.9689e-03, -2.2933e-02,  1.6010e-02,  6.9638e-02],
          [-3.7105e-02,  6.7179e-02, -1.0214e-02, -4.8984e-02, -4.1328e-02],
          [-8.6841e-03, -1.7692e-02, -1.4424e-02,  7.3771e-02,  1.2215e-02],
          [-7.9845e-02,  6.7047e-02,  3.1997e-02, -5.1304e-02,  6.0482e-02],
          [-1.4893e-02, -2.7676e-02,  5.4934e-02,  6.3990e-02,  2.7662e-02]]],


        ...,


        [[[ 1.0199e-02, -5.2322e-02, -3.7206e-02, -1.4544e-02, -4.6219e-02],
          [-1.2879e-02,  4.7828e-02,  7.8953e-02, -1.3771e-02,  2.3383e-02],
          [-2.0076e-02, -2.9035e-02,  5.4802e-02,  3.0305e-02, -2.6655e-02],
          [ 4.6331e-02,  6.4619e-02,  3.6669e-02, -5.7059e-02, -1.2394e-02],
          [-4.7441e-02,  3.9712e-02, -6.8904e-02, -7.4229e-02, -5.4093e-03]],

         [[-3.7258e-02,  8.1076e-02, -7.6125e-02, -2.9282e-02,  1.0107e-02],
          [ 2.3341e-02,  2.7966e-02,  4.9232e-02,  4.2246e-02, -2.2956e-02],
          [ 6.1198e-02,  5.5776e-02, -6.1231e-02, -7.3973e-02, -1.8083e-02],
          [ 2.4045e-02,  5.3863e-02,  3.1777e-02, -3.6017e-02,  7.9472e-02],
          [-6.9948e-02,  6.3699e-02,  4.3250e-02,  4.6033e-02,  8.9336e-04]],

         [[ 2.5100e-02,  5.6652e-03, -3.4297e-02, -4.4208e-02, -3.4073e-03],
          [ 1.6430e-02, -5.2721e-02,  3.6158e-02,  7.7301e-02,  4.7632e-03],
          [ 7.6485e-02, -4.3372e-03,  4.2569e-02, -3.6177e-02, -2.1329e-03],
          [-8.4518e-03, -9.9161e-03, -2.1800e-02, -6.9410e-02,  5.5372e-02],
          [ 7.8497e-02,  6.9532e-02, -1.9760e-03, -4.9298e-02, -3.0007e-02]],

         [[ 5.7926e-02, -1.7255e-02,  5.0323e-02,  7.3531e-03,  7.3418e-02],
          [ 5.8616e-02,  3.4145e-02,  6.8296e-02, -8.6052e-03, -1.9639e-02],
          [-7.3903e-03,  3.6604e-02, -6.6443e-02,  4.2403e-02, -1.6865e-02],
          [-4.4089e-02, -3.2403e-02, -3.5313e-02,  6.2766e-02,  4.4323e-02],
          [-7.7840e-02, -5.1307e-02,  3.1470e-02,  2.4083e-02, -3.1171e-02]],

         [[-7.2114e-02,  4.7854e-02, -3.7292e-02,  3.6088e-02, -1.2283e-02],
          [ 2.4217e-02,  4.2792e-02,  4.0946e-02, -4.8699e-02, -2.0729e-02],
          [ 4.8880e-02, -8.1478e-02, -7.3582e-02, -2.0240e-02,  5.9753e-02],
          [ 3.2898e-02, -1.7339e-02, -5.6911e-02,  6.5161e-02,  1.7858e-02],
          [ 3.9480e-02, -3.2163e-02,  3.1307e-02, -5.5870e-02, -1.6457e-02]],

         [[ 2.2531e-02, -8.0621e-02,  6.8040e-02, -5.3297e-02,  6.8076e-02],
          [-2.8019e-02, -4.2122e-02,  9.4667e-03,  1.1930e-02,  5.3379e-02],
          [ 7.0529e-03, -7.5513e-02, -5.8059e-03, -2.2628e-02, -3.1337e-02],
          [ 4.6129e-02,  1.0021e-02,  1.3801e-02,  5.4890e-02,  8.2099e-03],
          [-3.4219e-02, -3.5771e-02,  4.9384e-02,  4.2971e-03,  1.0185e-02]]],


        [[[ 1.5722e-02, -1.1761e-02, -5.5889e-02, -4.0064e-02,  4.6955e-02],
          [ 3.8323e-02,  1.4971e-02,  4.7523e-02, -7.3487e-02, -6.6887e-02],
          [ 5.4269e-02,  6.5048e-03, -2.5124e-02,  7.0294e-02,  2.8026e-02],
          [-7.3427e-02, -6.3417e-02, -6.9835e-02,  5.1313e-02, -3.0339e-02],
          [-3.7274e-02, -6.8501e-02,  1.3327e-02,  7.0011e-03,  7.3245e-02]],

         [[ 3.7582e-02,  8.0081e-02, -4.8837e-02,  7.3556e-03,  6.0067e-02],
          [-3.2691e-02, -4.5259e-02, -6.4128e-02, -5.0167e-02, -2.3580e-02],
          [-5.0516e-02, -7.3288e-02, -5.1057e-02, -4.8879e-02,  1.4947e-02],
          [ 5.5937e-02,  2.8608e-02, -5.1257e-02, -9.8808e-03,  7.7368e-03],
          [-6.1867e-03,  5.5593e-02, -6.4040e-02, -1.9817e-02,  4.8827e-02]],

         [[ 7.0229e-02,  3.4918e-02,  3.4894e-02, -3.7108e-02,  5.1772e-02],
          [-8.0578e-02, -1.9076e-02, -5.2325e-02, -3.1958e-02, -1.4594e-02],
          [-6.5627e-02, -4.4570e-02,  7.8603e-02, -1.8659e-02, -4.3064e-02],
          [-5.8323e-02,  2.8146e-02,  2.7508e-02, -2.0674e-03, -2.3774e-02],
          [ 5.3830e-02,  3.1012e-03, -1.3121e-02,  4.2424e-02,  1.3136e-03]],

         [[ 6.2128e-02, -8.1062e-02,  5.5346e-03,  4.9415e-02,  1.9914e-02],
          [ 4.5311e-02,  4.5756e-02, -1.6908e-02,  1.4688e-02,  3.0094e-02],
          [-4.5764e-03, -3.1909e-02,  6.0014e-02, -6.2276e-02, -3.4248e-02],
          [ 3.9257e-02,  1.9695e-02, -8.7933e-03,  7.9642e-02, -3.3138e-02],
          [-1.9051e-02,  4.8750e-02, -4.9173e-03,  7.9091e-02,  1.4798e-02]],

         [[-5.5164e-02,  3.3881e-02, -3.4420e-04,  6.3908e-02,  7.2785e-02],
          [ 3.9900e-02,  6.5517e-03, -8.0552e-02, -1.5329e-02, -5.7844e-02],
          [-6.8076e-03,  5.0193e-02,  7.4728e-02, -2.9026e-02,  2.0508e-02],
          [ 6.2877e-02,  8.0455e-02,  5.1410e-02, -3.8253e-02, -6.9830e-02],
          [-1.4001e-02, -6.4940e-02,  4.3407e-03, -2.0574e-03, -6.3191e-02]],

         [[ 1.8252e-02,  8.8192e-03,  1.6308e-02,  8.0085e-02,  5.4516e-02],
          [ 3.2235e-02, -1.4626e-02, -5.4716e-02,  7.6015e-02, -5.5979e-02],
          [ 3.0786e-02,  3.1961e-02, -1.8162e-02,  6.9846e-02,  3.8360e-02],
          [ 7.4391e-02,  1.7810e-02,  5.8638e-02, -2.3435e-02, -5.3262e-02],
          [ 7.7326e-02, -2.3349e-02, -7.6374e-02, -1.5019e-02, -4.1779e-02]]],


        [[[-4.0523e-02, -1.3673e-02,  1.7464e-02, -8.0872e-02,  1.3032e-02],
          [-5.7647e-02,  7.6154e-02,  2.9756e-02,  6.1005e-02, -6.2796e-02],
          [-1.8865e-02, -3.1772e-02, -3.0434e-02, -6.4698e-02, -7.2306e-02],
          [-1.3506e-02,  7.4316e-02,  3.0436e-02, -1.4313e-02,  7.2392e-02],
          [-3.5933e-02,  1.1374e-02, -1.8395e-02, -3.0843e-02,  5.5321e-02]],

         [[ 1.4394e-02,  3.6295e-02,  4.9757e-02, -4.6856e-03,  9.4505e-03],
          [-5.9689e-02,  5.7720e-02,  7.4232e-02,  7.1351e-02,  3.7225e-02],
          [ 3.1824e-02, -3.5381e-02, -3.4969e-02,  8.2402e-03, -2.4470e-02],
          [-7.7093e-02,  4.4498e-02,  4.7004e-02, -7.2011e-02, -3.8237e-02],
          [-6.7163e-02, -1.7111e-02,  5.9214e-02,  7.7507e-02, -4.4913e-02]],

         [[-8.0340e-02,  1.9304e-02,  1.0000e-03,  4.0900e-03, -6.6380e-02],
          [-6.6810e-02, -6.3729e-03,  2.0123e-02, -4.3774e-02, -3.4981e-02],
          [-6.8541e-02,  1.6689e-02, -7.1792e-02,  1.5246e-02, -6.8854e-04],
          [ 4.3843e-02,  2.6870e-02, -3.8101e-04, -7.8826e-02,  6.5015e-02],
          [-2.0440e-02,  7.9386e-02, -3.8219e-03, -2.4580e-02,  4.0634e-02]],

         [[ 5.5544e-02, -3.7836e-02, -7.9338e-02,  7.0619e-02, -3.3446e-02],
          [-3.2902e-03, -4.8389e-02,  4.7453e-02,  5.8127e-02, -4.8315e-03],
          [-5.6379e-02, -5.8292e-02, -3.9011e-02, -4.0794e-03,  4.6537e-02],
          [ 8.0529e-02,  6.2578e-02,  2.7042e-02, -7.3989e-02,  8.9902e-03],
          [-3.2401e-02, -6.5050e-02,  6.9688e-02, -6.6803e-02,  3.6991e-02]],

         [[-2.2901e-02,  4.9715e-02, -5.3222e-02, -7.6049e-02, -6.6969e-02],
          [ 9.8178e-03, -6.9489e-02, -3.7907e-02,  3.8849e-02, -3.6555e-02],
          [-4.1870e-02,  1.7604e-02,  1.3747e-02,  1.2642e-02, -2.0011e-02],
          [-6.3959e-02,  6.5200e-02, -3.1475e-02, -5.4463e-02, -7.0515e-02],
          [-3.3264e-02,  5.0797e-04,  4.7155e-02,  5.2026e-02, -6.2295e-02]],

         [[-7.0623e-02, -3.8786e-02, -1.6983e-02, -4.0680e-02, -3.5289e-03],
          [ 6.6456e-02, -5.6733e-02, -3.2044e-02, -8.0967e-02,  6.8003e-02],
          [ 7.7967e-02, -1.8929e-02,  4.1282e-02, -4.3998e-02,  7.8460e-02],
          [ 4.9829e-02, -3.3562e-02, -7.5281e-02,  5.1357e-02,  2.8261e-02],
          [ 6.7955e-02, -3.4526e-02, -4.7810e-02,  7.8384e-02, -1.2523e-02]]]])
conv2.bias 
 tensor([-0.0807, -0.0406, -0.0171,  0.0340,  0.0777,  0.0155,  0.0788, -0.0089,
        -0.0693, -0.0276,  0.0578,  0.0165,  0.0525, -0.0673, -0.0171,  0.0804])
fc1.weight 
 tensor([[-0.0032, -0.0052, -0.0235,  ..., -0.0029, -0.0208,  0.0015],
        [-0.0460,  0.0396,  0.0050,  ...,  0.0194,  0.0410,  0.0174],
        [-0.0391, -0.0378, -0.0265,  ...,  0.0376,  0.0157,  0.0217],
        ...,
        [-0.0304, -0.0122,  0.0140,  ..., -0.0028,  0.0002, -0.0292],
        [ 0.0346,  0.0187,  0.0185,  ..., -0.0256,  0.0434, -0.0359],
        [-0.0116,  0.0302,  0.0412,  ...,  0.0163, -0.0479, -0.0163]])
fc1.bias 
 tensor([-0.0453,  0.0006,  0.0223,  0.0062, -0.0188,  0.0133,  0.0158, -0.0061,
         0.0145,  0.0035, -0.0145, -0.0451,  0.0352,  0.0302, -0.0086,  0.0333,
         0.0025,  0.0102,  0.0128, -0.0002, -0.0021,  0.0439,  0.0468,  0.0120,
        -0.0089,  0.0115, -0.0260,  0.0178,  0.0217,  0.0456, -0.0116,  0.0224,
        -0.0207, -0.0060, -0.0386,  0.0405, -0.0180, -0.0116, -0.0009,  0.0239,
        -0.0203, -0.0064,  0.0406, -0.0173,  0.0378,  0.0180,  0.0195,  0.0145,
        -0.0435,  0.0157,  0.0363, -0.0170,  0.0064, -0.0107,  0.0341, -0.0227,
         0.0179, -0.0221, -0.0264, -0.0018, -0.0104, -0.0481, -0.0343, -0.0245,
        -0.0190, -0.0425,  0.0329, -0.0488,  0.0279,  0.0034, -0.0491, -0.0422,
         0.0237,  0.0211, -0.0260, -0.0380,  0.0010, -0.0025, -0.0367,  0.0386,
        -0.0090,  0.0157, -0.0292, -0.0302,  0.0423, -0.0476, -0.0401,  0.0195,
         0.0315,  0.0285,  0.0456,  0.0297,  0.0091, -0.0291,  0.0301, -0.0231,
        -0.0055,  0.0359,  0.0164, -0.0297,  0.0243, -0.0429,  0.0311,  0.0072,
        -0.0400, -0.0473,  0.0470, -0.0068, -0.0160,  0.0092, -0.0327, -0.0040,
         0.0245,  0.0178, -0.0249, -0.0146,  0.0401,  0.0019,  0.0216, -0.0497])
fc2.weight 
 tensor([[ 0.0327, -0.0668,  0.0384,  ..., -0.0704,  0.0015, -0.0560],
        [-0.0657, -0.0632,  0.0364,  ...,  0.0721,  0.0100,  0.0768],
        [ 0.0130,  0.0838, -0.0002,  ...,  0.0178,  0.0226, -0.0903],
        ...,
        [ 0.0313, -0.0549,  0.0724,  ...,  0.0874, -0.0439, -0.0327],
        [ 0.0371, -0.0318,  0.0875,  ...,  0.0631, -0.0813,  0.0321],
        [-0.0071,  0.0583,  0.0542,  ..., -0.0285,  0.0750,  0.0572]])
fc2.bias 
 tensor([ 0.0134, -0.0475,  0.0059,  0.0127,  0.0480, -0.0361,  0.0095,  0.0558,
         0.0322,  0.0769,  0.0570, -0.0611, -0.0341, -0.0588, -0.0095,  0.0799,
        -0.0563,  0.0562, -0.0833, -0.0296,  0.0359, -0.0193, -0.0529, -0.0428,
        -0.0597, -0.0521,  0.0202, -0.0128, -0.0195,  0.0684, -0.0416,  0.0795,
         0.0608,  0.0296, -0.0304, -0.0381, -0.0834,  0.0112,  0.0332,  0.0466,
        -0.0837, -0.0375,  0.0573, -0.0380,  0.0802, -0.0668,  0.0514, -0.0070,
         0.0033,  0.0871, -0.0178,  0.0522, -0.0571,  0.0472,  0.0561,  0.0215,
         0.0012, -0.0419, -0.0055, -0.0174, -0.0204, -0.0619, -0.0016, -0.0784,
        -0.0819, -0.0562, -0.0807,  0.0765,  0.0316,  0.0423, -0.0063, -0.0210,
         0.0315,  0.0806,  0.0109, -0.0175,  0.0705, -0.0466,  0.0097,  0.0098,
         0.0798,  0.0796, -0.0783, -0.0730])
fc3.weight 
 tensor([[-0.0351,  0.0782,  0.0364, -0.0547,  0.0099, -0.0188, -0.0582, -0.0056,
         -0.1074,  0.0905, -0.0629,  0.1084, -0.0456, -0.0382,  0.0472, -0.0674,
         -0.0414, -0.0523, -0.0009, -0.0946, -0.0463,  0.0898,  0.0631, -0.0910,
          0.0091, -0.0383, -0.0311,  0.0978,  0.0204, -0.1040, -0.0793, -0.1019,
          0.0631, -0.0136, -0.0283, -0.0260, -0.1050,  0.0029, -0.1016, -0.0253,
         -0.0350,  0.0862,  0.0013,  0.0482, -0.0284,  0.0073,  0.0053,  0.0703,
          0.0078, -0.0487,  0.0899, -0.0934,  0.0385, -0.0764,  0.0070, -0.0486,
          0.0197,  0.0372, -0.0763,  0.0477, -0.1010, -0.0224, -0.0491, -0.0548,
         -0.0461,  0.0629,  0.0691,  0.0064, -0.0637, -0.0727,  0.0074, -0.0961,
         -0.0429,  0.0362,  0.0140,  0.0659, -0.0640,  0.0336,  0.1020, -0.0572,
          0.0779,  0.0741, -0.0588, -0.0856],
        [ 0.0477, -0.0715,  0.0228,  0.0505,  0.0311, -0.0720,  0.0702,  0.0573,
          0.0241,  0.0151, -0.1036, -0.0023,  0.0145,  0.0096, -0.0538,  0.0805,
          0.0951,  0.1079, -0.0928,  0.0317, -0.0034, -0.0983,  0.0781,  0.0109,
         -0.0001, -0.0881,  0.0208,  0.0635,  0.0434,  0.1080, -0.0625, -0.0924,
          0.0947,  0.0438,  0.0596, -0.0324, -0.0555, -0.0729,  0.0614, -0.0416,
         -0.0770, -0.0748, -0.0193, -0.0991, -0.0900,  0.0201,  0.0715, -0.0605,
          0.0033, -0.0418,  0.0473, -0.1069,  0.0095, -0.0540,  0.0695,  0.1042,
          0.0245,  0.0715,  0.1073,  0.0643, -0.0956, -0.0843,  0.0391,  0.0033,
          0.0787, -0.1019, -0.0056, -0.0830,  0.0327,  0.0031, -0.0049, -0.0971,
          0.0549, -0.0827, -0.0933, -0.0676, -0.0909, -0.0925,  0.0300,  0.0154,
          0.0706, -0.0158, -0.0479, -0.0853],
        [-0.0502, -0.0625,  0.0947, -0.0247,  0.0150, -0.0101,  0.0695,  0.0064,
          0.0390, -0.0735, -0.0347,  0.0255, -0.0964,  0.0016, -0.0568,  0.1050,
         -0.0972,  0.0176,  0.0756, -0.0781,  0.0529, -0.0751,  0.0823,  0.0953,
          0.0651, -0.0740, -0.0460,  0.0549, -0.0199, -0.0245, -0.1058,  0.0528,
         -0.1070,  0.0086,  0.0665, -0.0895,  0.0798, -0.0902,  0.1072,  0.0173,
          0.0257,  0.0481, -0.0289,  0.0291, -0.1057, -0.0429, -0.1033, -0.0716,
         -0.0690,  0.0628,  0.0718, -0.1040, -0.1042,  0.0831,  0.0844,  0.1073,
          0.0846,  0.0477,  0.0619,  0.0479, -0.0872, -0.0647,  0.0556,  0.0539,
         -0.0421, -0.0697,  0.0625,  0.0688,  0.0778,  0.0239, -0.0431,  0.0142,
          0.0107,  0.0207,  0.0443, -0.0164,  0.0764, -0.0577, -0.0278, -0.0256,
          0.0416, -0.0133, -0.0185,  0.0236],
        [ 0.1058,  0.0798,  0.0002,  0.0795,  0.1071, -0.0654,  0.0125,  0.0800,
         -0.0396, -0.0300,  0.0885,  0.0236, -0.0025, -0.0938, -0.0494,  0.0230,
          0.0239, -0.0673, -0.0466,  0.0425,  0.0175,  0.0395,  0.1038,  0.0651,
         -0.0052,  0.0480, -0.0311, -0.0081,  0.0582,  0.0096,  0.0950,  0.0721,
         -0.0648,  0.0789, -0.0936,  0.0235, -0.0654, -0.0109,  0.0745,  0.0503,
          0.0175,  0.0719,  0.1069, -0.0279, -0.0692, -0.0972,  0.0624, -0.0348,
          0.0703,  0.0675,  0.0817, -0.0586,  0.0760, -0.0307, -0.0205, -0.0042,
          0.0939,  0.0528, -0.0242, -0.0632, -0.0184, -0.0781,  0.0133,  0.0246,
          0.0962, -0.0454,  0.0117, -0.0783,  0.0378,  0.0393,  0.0874,  0.0969,
         -0.1045,  0.0167, -0.0041, -0.0450, -0.0738, -0.0291, -0.0480,  0.1086,
          0.0284, -0.0981,  0.0933,  0.0159],
        [-0.1059,  0.0177, -0.0834,  0.0280, -0.0464,  0.0020,  0.0380,  0.1060,
          0.0865, -0.0857, -0.0820, -0.0471,  0.0236, -0.0603,  0.0667,  0.0142,
          0.0905, -0.0847, -0.0254, -0.0391, -0.0173, -0.1018, -0.0318,  0.0223,
         -0.0592,  0.0776,  0.0137, -0.0570, -0.0139,  0.0646, -0.0869, -0.0624,
         -0.0882,  0.0348,  0.0161, -0.0336,  0.0755,  0.1023, -0.1000, -0.0267,
         -0.1080,  0.0422, -0.0045,  0.0512, -0.0861, -0.0749,  0.1039, -0.0755,
          0.0404,  0.0974, -0.0386, -0.0557,  0.0769, -0.0065,  0.0711,  0.0475,
          0.0997,  0.0045,  0.0312, -0.0187, -0.1067, -0.0217,  0.0200, -0.0657,
          0.0338,  0.0409,  0.0502,  0.0765, -0.0575,  0.0739, -0.0023, -0.0798,
          0.0923,  0.0901, -0.0271,  0.0934,  0.0941, -0.0731,  0.0918,  0.0247,
         -0.0655, -0.0685, -0.0875,  0.0479],
        [-0.0871,  0.0840, -0.0604, -0.0193, -0.0877, -0.0734,  0.0127, -0.0357,
          0.0891, -0.0521, -0.0469, -0.0877, -0.0775,  0.0123, -0.0194, -0.1078,
          0.0492, -0.1044, -0.0714,  0.0606,  0.0053, -0.0227,  0.0231, -0.0439,
         -0.0554,  0.0112, -0.0717,  0.0829, -0.0140,  0.0908, -0.0258,  0.0669,
         -0.0290, -0.0555,  0.1082, -0.0481, -0.0175, -0.0107, -0.0297, -0.0472,
         -0.0606, -0.0301, -0.0846, -0.0839,  0.0077,  0.0267,  0.0926, -0.0119,
         -0.0195,  0.0077,  0.0336,  0.0981,  0.0040, -0.0447, -0.0131, -0.1037,
         -0.1037,  0.0380,  0.0343,  0.1038,  0.0378, -0.0402, -0.0581, -0.0523,
          0.0604,  0.0402, -0.0838, -0.0718, -0.0488,  0.0242,  0.0727, -0.0429,
          0.0225, -0.0209,  0.0964,  0.0810, -0.0704,  0.0720, -0.0120,  0.0704,
         -0.0779,  0.0169, -0.0885,  0.0133],
        [ 0.0809, -0.0432, -0.0358,  0.0023, -0.0809, -0.0058, -0.0977,  0.0731,
          0.0576, -0.0883,  0.0758,  0.0168, -0.0107, -0.0284,  0.0640,  0.0921,
         -0.1068, -0.0934,  0.0853,  0.0940, -0.0152,  0.0899,  0.0431,  0.0047,
         -0.0164, -0.0339,  0.0470,  0.0021, -0.0960,  0.1058,  0.0646, -0.0491,
         -0.1037,  0.0242,  0.0083,  0.0221, -0.0810, -0.0367, -0.0478, -0.0849,
         -0.0836,  0.0972, -0.0792, -0.1085, -0.0072, -0.0512, -0.0573, -0.0067,
          0.0944,  0.0517,  0.0925,  0.0974,  0.0239,  0.1011, -0.1045, -0.0396,
          0.0565, -0.0717, -0.0250, -0.0896,  0.0717,  0.0155, -0.0818, -0.0084,
         -0.0022, -0.1046, -0.0607, -0.0657, -0.0968,  0.0873, -0.0598, -0.0593,
          0.0558, -0.1064,  0.0574,  0.1044, -0.0233, -0.0505,  0.1041,  0.1085,
         -0.0538, -0.0595,  0.0956,  0.0604],
        [ 0.1015, -0.0722,  0.0488,  0.0101,  0.0202, -0.0593,  0.0623, -0.0953,
          0.0665,  0.1086, -0.1021,  0.0690,  0.0366, -0.0012,  0.0754,  0.0651,
          0.0005,  0.0642,  0.0149,  0.0153, -0.0449, -0.0533, -0.0103, -0.0387,
         -0.0602,  0.1004,  0.0283,  0.0476,  0.0596,  0.0504,  0.0366, -0.0626,
         -0.0361, -0.0263, -0.1079,  0.0652, -0.1043,  0.0658, -0.1018,  0.0300,
         -0.0766, -0.0371, -0.0634, -0.0091, -0.0625,  0.0402, -0.0438, -0.0524,
         -0.0687,  0.0636,  0.0641,  0.0817, -0.1090,  0.0409, -0.0558,  0.1062,
          0.0414, -0.0629,  0.0131, -0.0563,  0.0378,  0.0774, -0.0750,  0.0777,
          0.0578, -0.0936,  0.0269, -0.0396, -0.0964, -0.0894, -0.0125, -0.0236,
          0.0014, -0.0819, -0.0124,  0.0247,  0.1023, -0.0884,  0.0624, -0.0268,
          0.0912,  0.0312, -0.0989, -0.0456],
        [-0.0516, -0.0043, -0.0102,  0.0299,  0.0665, -0.0009,  0.1026, -0.0169,
          0.0006,  0.0715,  0.1036,  0.0087,  0.0707, -0.0396, -0.0476, -0.0896,
          0.0608, -0.0180, -0.0367, -0.0653,  0.0630,  0.1080,  0.0807, -0.0254,
         -0.0631, -0.0055,  0.0611,  0.0971,  0.0037,  0.0689,  0.0861,  0.0770,
          0.0932,  0.0045, -0.0973, -0.0262, -0.0513, -0.0835, -0.0036,  0.0355,
         -0.0524, -0.0042,  0.0043,  0.0252,  0.0437, -0.0898, -0.0324,  0.0382,
          0.0322,  0.0678,  0.0735,  0.0450, -0.0094, -0.0311,  0.0541, -0.0942,
          0.0945,  0.0062,  0.0225,  0.0156, -0.0396,  0.0228,  0.0718, -0.0720,
         -0.1054,  0.0230,  0.0869,  0.0996,  0.0456,  0.0548,  0.0861,  0.0787,
          0.0159,  0.0857, -0.0790, -0.0252,  0.0639,  0.0059,  0.0513,  0.0966,
         -0.0251, -0.0521, -0.0341, -0.0758],
        [-0.0203, -0.0479, -0.0209, -0.0015, -0.0537, -0.0816, -0.0397,  0.0425,
         -0.0706, -0.0772,  0.0870,  0.0587, -0.0714,  0.0805, -0.0104,  0.0030,
         -0.0112, -0.0341,  0.0635, -0.0202,  0.0839, -0.0273,  0.0969, -0.1013,
         -0.0227,  0.0159, -0.0036,  0.0428, -0.0474,  0.0933, -0.0499,  0.0124,
         -0.0043,  0.0435,  0.0785, -0.0419,  0.0231,  0.1041, -0.0128, -0.0573,
          0.0456,  0.0235,  0.0565, -0.0868, -0.1073, -0.0534,  0.0074, -0.0648,
         -0.0178, -0.0929,  0.1065,  0.0231, -0.0270, -0.0733,  0.0656,  0.0857,
         -0.0941,  0.0018, -0.0426, -0.0115,  0.0978, -0.0780, -0.0622,  0.0838,
         -0.0606, -0.0661, -0.0331,  0.0457, -0.0763, -0.0903, -0.0381, -0.0198,
         -0.0369, -0.0135, -0.1033,  0.1031,  0.0206,  0.0382, -0.1005, -0.0188,
         -0.0711, -0.0649,  0.0517, -0.0060]])
fc3.bias 
 tensor([ 0.0853, -0.1027, -0.0893, -0.0900,  0.0732,  0.0718, -0.0705,  0.0435,
         0.0994,  0.0521])
SAVE MODEL to /home/yongqiang/pytorch_work/end2end-asr-pytorch-example/log/epoch_9.th

Process finished with exit code 0
(pt-1.4_py-3.6) yongqiang@yongqiang:~/pytorch_work/end2end-asr-pytorch-example/log$ ll
total 352
drwxrwxrwx 1 yongqiang yongqiang    512 Jun 20 16:01 ./
drwxrwxrwx 1 yongqiang yongqiang    512 Jun 20 16:01 ../
-rw-rw-rw- 1 yongqiang yongqiang 250027 Jun 20 16:01 epoch_9.th
(pt-1.4_py-3.6) yongqiang@yongqiang:~/pytorch_work/end2end-asr-pytorch-example/log$
(pt-1.4_py-3.6) yongqiang@yongqiang:~/pytorch_work/end2end-asr-pytorch-example/log$ du -sh *
352K    epoch_9.th
(pt-1.4_py-3.6) yongqiang@yongqiang:~/pytorch_work/end2end-asr-pytorch-example/log$

4. torch.load(load_path)

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# yongqiang cheng

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim


def load_model(load_path):
    """
    Load a checkpoint written by save_model and rebuild the model/optimizer.

    args:
        load_path: string, path to a ``.th`` checkpoint file

    returns:
        (model, opt, epoch, metrics, args, label2id, id2label)
    """
    checkpoint = torch.load(load_path)

    epoch = checkpoint['epoch']
    metrics = checkpoint['metrics']
    # BUG FIX: `args` was only bound inside `if 'args' in checkpoint:`,
    # so a checkpoint lacking that key made every later use of `args`
    # raise NameError.  Default to None explicitly.
    args = checkpoint.get('args')

    label2id = checkpoint['label2id']
    id2label = checkpoint['id2label']

    model = init_transformer_model(args, label2id, id2label)
    model.load_state_dict(checkpoint['model_state_dict'])
    if args.cuda:
        model = model.cuda()

    opt = init_optimizer(args, model)
    if opt is not None:
        opt.optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
        # Hoist the repeated dict lookup out of the per-field assignments.
        optimizer_params = checkpoint['optimizer_params']
        # BUG FIX: this compared `constant.args.loss`, but `constant` is
        # never imported in this file (NameError at runtime); use the
        # checkpoint's own `args`, consistent with the rest of the function.
        if args.loss == "ce":
            opt._step = optimizer_params['_step']
            opt._rate = optimizer_params['_rate']
            opt.warmup = optimizer_params['warmup']
            opt.factor = optimizer_params['factor']
            opt.model_size = optimizer_params['model_size']
        elif args.loss == "ctc":
            opt.lr = optimizer_params['lr']
            opt.lr_anneal = optimizer_params['lr_anneal']
        else:
            print("Need to define loss type")

    return model, opt, epoch, metrics, args, label2id, id2label


def save_model(model, epoch, optimizer, save_folder, name, loss, best_model=False):
    """
    Save a training checkpoint to ``<save_folder>/<name>/``.

    args:
        model: nn.Module whose state_dict is saved
        epoch: int, current epoch (used in the file name unless best_model)
        optimizer: optimizer whose state_dict is saved
        save_folder: string, root output directory
        name: string, experiment sub-directory
        loss: "ce" or "ctc" -- selects which optimizer params are recorded
        best_model: if True, save as best_model.th instead of epoch_<n>.th

    raises:
        ValueError: if loss is neither "ce" nor "ctc".  (Previously this
        fell through and crashed with NameError on the unbound `config`.)
    """

    if best_model:
        save_path = "{}/{}/best_model.th".format(save_folder, name)
    else:
        save_path = "{}/{}/epoch_{}.th".format(save_folder, name, epoch)

    if not os.path.exists(save_folder + "/" + name):
        os.makedirs(save_folder + "/" + name)

    print("SAVE MODEL to", save_path)
    # NOTE: fields other than the state dicts and epoch are placeholder
    # strings in this demo ("label2id", "args", ...).
    if loss == "ce":
        config = {
            'label2id': "label2id",
            'id2label': "id2label",
            'args': "args",
            # BUG FIX: the ce branch stored the literal string "epoch";
            # store the actual epoch number like the ctc branch does.
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'optimizer_params': {
                '_step': "_step",
                '_rate': "_rate",
                'warmup': "warmup",
                'factor': "factor",
                'model_size': "model_size"
            },
            'metrics': "metrics"
        }
    elif loss == "ctc":
        config = {
            'label2id': "label2id",
            'id2label': "id2label",
            'args': "args",
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'optimizer_params': {
                'lr': "lr",
                'lr_anneal': "lr_anneal"
            },
            'metrics': "metrics"
        }
    else:
        # BUG FIX: previously only printed and then hit NameError on the
        # unbound `config` below; fail fast with a clear exception instead.
        raise ValueError("Loss is not defined")

    torch.save(config, save_path)


# Define model
class TheModelClass(nn.Module):
    """Small LeNet-style CNN: two conv/pool stages then three linear layers.

    Expects (N, 3, 32, 32) input and produces (N, 10) logits
    (32 -> conv5 -> 28 -> pool -> 14 -> conv5 -> 10 -> pool -> 5).
    """

    def __init__(self):
        super(TheModelClass, self).__init__()
        # Feature extractor: 3 input channels -> 6 -> 16 feature maps.
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        # Classifier head over the 16 * 5 * 5 flattened features.
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        """Run a batch through the network and return raw class scores."""
        features = self.pool(F.relu(self.conv1(x)))
        features = self.pool(F.relu(self.conv2(features)))
        flat = features.view(-1, 16 * 5 * 5)
        hidden = F.relu(self.fc1(flat))
        hidden = F.relu(self.fc2(hidden))
        return self.fc3(hidden)


# Initialize model
model = TheModelClass()

# Initialize optimizer
optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)

# Print model's state_dict
# (keys are "<layer>.<param>", e.g. "conv1.weight"; values are tensors)
print("Model's state_dict:")
for param_tensor in model.state_dict():
    print(param_tensor, "\t", model.state_dict()[param_tensor].size())

# Print optimizer's state_dict
# (holds 'state' and 'param_groups' with the lr/momentum hyperparameters)
print("Optimizer's state_dict:")
for var_name in optimizer.state_dict():
    print(var_name, "\t", optimizer.state_dict()[var_name])

# # Print model's state_dict
# print("Model's state_dict:")
# for param_tensor in model.state_dict():
#     print(param_tensor, "\n", model.state_dict()[param_tensor])
#
# model_folder = "/home/yongqiang/pytorch_work/end2end-asr-pytorch-example"
# save_model(model=model, epoch=9, optimizer=optimizer, save_folder=model_folder, name="log", loss="ctc", best_model=False)

# Load the checkpoint written by an earlier save_model() run.
# NOTE(review): machine-specific absolute path -- this script fails with
# FileNotFoundError if the checkpoint has not been created first.
load_path = "/home/yongqiang/pytorch_work/end2end-asr-pytorch-example/log/epoch_9.th"
loss = "ctc"
checkpoint = torch.load(load_path)
print("load_path:", load_path)

# In this demo these fields hold placeholder strings (see save_model).
epoch = checkpoint['epoch']
metrics = checkpoint['metrics']
if 'args' in checkpoint:
    args = checkpoint['args']

label2id = checkpoint['label2id']
id2label = checkpoint['id2label']

model_state_dict_data = checkpoint['model_state_dict']

# First pass: print each parameter's shape.
for k, v in list(model_state_dict_data.items()):
    print(k, "\t", v.size())

# Second pass: print the full tensor values.
for k, v in model_state_dict_data.items():
    print(k, "\n", v)

# Pull back the optimizer bookkeeping matching the loss type used at save
# time ("ctc" here, so only lr / lr_anneal are read).
if loss == "ce":
    _step = checkpoint['optimizer_params']['_step']
    _rate = checkpoint['optimizer_params']['_rate']
    warmup = checkpoint['optimizer_params']['warmup']
    factor = checkpoint['optimizer_params']['factor']
    model_size = checkpoint['optimizer_params']['model_size']
elif loss == "ctc":
    lr = checkpoint['optimizer_params']['lr']
    lr_anneal = checkpoint['optimizer_params']['lr_anneal']
else:
    print("Need to define loss type")

/home/yongqiang/miniconda3/envs/pt-1.4_py-3.6/bin/python /home/yongqiang/pytorch_work/end2end-asr-pytorch-example/yongqiang.py
Model's state_dict:
conv1.weight 	 torch.Size([6, 3, 5, 5])
conv1.bias 	 torch.Size([6])
conv2.weight 	 torch.Size([16, 6, 5, 5])
conv2.bias 	 torch.Size([16])
fc1.weight 	 torch.Size([120, 400])
fc1.bias 	 torch.Size([120])
fc2.weight 	 torch.Size([84, 120])
fc2.bias 	 torch.Size([84])
fc3.weight 	 torch.Size([10, 84])
fc3.bias 	 torch.Size([10])
Optimizer's state_dict:
state 	 {}
param_groups 	 [{'lr': 0.001, 'momentum': 0.9, 'dampening': 0, 'weight_decay': 0, 'nesterov': False, 'params': [140319393794736, 140319372878328, 140319372878400, 140319372878472, 140319372878544, 140319372878616, 140319372878688, 140319372878760, 140319372878832, 140319372878904]}]
load_path: /home/yongqiang/pytorch_work/end2end-asr-pytorch-example/log/epoch_9.th
conv1.weight 	 torch.Size([6, 3, 5, 5])
conv1.bias 	 torch.Size([6])
conv2.weight 	 torch.Size([16, 6, 5, 5])
conv2.bias 	 torch.Size([16])
fc1.weight 	 torch.Size([120, 400])
fc1.bias 	 torch.Size([120])
fc2.weight 	 torch.Size([84, 120])
fc2.bias 	 torch.Size([84])
fc3.weight 	 torch.Size([10, 84])
fc3.bias 	 torch.Size([10])
conv1.weight 
 tensor([[[[ 0.0134, -0.0569, -0.0762, -0.0897, -0.0430],
          [ 0.0423, -0.0034, -0.0318,  0.0950, -0.0198],
          [ 0.0963,  0.0397, -0.0560,  0.0744,  0.0987],
          [ 0.0997,  0.0042, -0.0353, -0.0677,  0.0536],
          [ 0.0791,  0.0338, -0.0949, -0.0367,  0.0982]],

         [[ 0.0951,  0.0567,  0.1046, -0.0704, -0.0307],
          [ 0.0991,  0.0478,  0.0778,  0.0336,  0.0927],
          [ 0.0694,  0.0703,  0.1084,  0.1068,  0.0833],
          [-0.0444,  0.0951,  0.0369, -0.1047,  0.0575],
          [ 0.0866,  0.1128,  0.0547, -0.0558,  0.0164]],

         [[ 0.0562, -0.1008, -0.0253,  0.0131,  0.0880],
          [-0.0422, -0.0437,  0.0181, -0.0247, -0.0727],
          [-0.0072,  0.0113,  0.0032,  0.0571,  0.0431],
          [-0.0011, -0.0778,  0.0533,  0.0441,  0.0252],
          [-0.0267,  0.0294, -0.0218, -0.1044, -0.0390]]],


        [[[ 0.0848,  0.1063, -0.0568, -0.0719,  0.0082],
          [-0.0797,  0.0945, -0.0328, -0.0290, -0.0759],
          [ 0.0794, -0.0985, -0.0748, -0.0041,  0.0019],
          [-0.0375, -0.0602, -0.0819, -0.1079,  0.0773],
          [ 0.0944, -0.0515, -0.0621,  0.0124,  0.0627]],

         [[ 0.0582, -0.1018,  0.0449, -0.0540,  0.0171],
          [-0.0551,  0.1112, -0.0233, -0.1066,  0.0431],
          [ 0.0936, -0.1010, -0.0952, -0.0989, -0.0224],
          [-0.0374, -0.0663, -0.1145, -0.0551, -0.0156],
          [-0.0178, -0.0025, -0.0656, -0.0878,  0.0368]],

         [[ 0.0199, -0.0584,  0.0842, -0.1144, -0.0983],
          [ 0.0355,  0.0959, -0.0010, -0.0964,  0.0487],
          [-0.0616, -0.0055,  0.0847,  0.0304, -0.0560],
          [ 0.0960, -0.1062,  0.1005, -0.0658, -0.0791],
          [ 0.0265, -0.0321, -0.0826, -0.1112, -0.0275]]],


        [[[-0.1136, -0.0062, -0.0536, -0.0138, -0.1113],
          [ 0.0939,  0.0356, -0.0073,  0.0578,  0.0617],
          [ 0.0470,  0.0967, -0.0801,  0.1070,  0.0272],
          [ 0.0662,  0.0654,  0.0242,  0.1028, -0.0372],
          [ 0.0699,  0.0887,  0.1111,  0.0259, -0.0816]],

         [[ 0.0348,  0.0764,  0.0776, -0.0557,  0.0993],
          [ 0.0266, -0.0372, -0.0937,  0.0853, -0.0370],
          [ 0.0783, -0.0296,  0.0722, -0.1091, -0.1092],
          [ 0.1108,  0.1095, -0.0578,  0.0673,  0.0882],
          [ 0.0088, -0.0430,  0.0211,  0.1035, -0.0614]],

         [[ 0.0309,  0.0961, -0.0820,  0.0557,  0.1115],
          [ 0.0167,  0.0800,  0.0058,  0.1004, -0.0041],
          [-0.0032, -0.0768,  0.0156,  0.0554,  0.0967],
          [-0.0623, -0.0133,  0.0419,  0.0855, -0.0093],
          [-0.0782,  0.0276, -0.0715, -0.0109,  0.0090]]],


        [[[-0.0351,  0.0579, -0.0033,  0.0753,  0.0762],
          [ 0.0846,  0.0981, -0.0195, -0.0937,  0.0385],
          [-0.0608, -0.0624,  0.0757,  0.0834, -0.0371],
          [ 0.0948, -0.0822, -0.1009,  0.1132, -0.1114],
          [ 0.0245,  0.0276,  0.0120, -0.0866,  0.0686]],

         [[ 0.0277,  0.0420,  0.0820,  0.0180, -0.0904],
          [-0.0340,  0.0228,  0.1026,  0.0177,  0.0052],
          [-0.0608, -0.0869, -0.1016,  0.0253,  0.1009],
          [-0.0366, -0.1122,  0.0936, -0.0223, -0.0959],
          [-0.0824,  0.1027,  0.0007, -0.0691, -0.0679]],

         [[-0.0165, -0.0798,  0.0052, -0.0003, -0.1124],
          [-0.0151,  0.0668,  0.1082, -0.0442, -0.1077],
          [-0.0055, -0.0172,  0.0627,  0.0263, -0.0847],
          [ 0.0741,  0.1126,  0.0694, -0.0641, -0.0290],
          [-0.0695, -0.0944,  0.0760,  0.1055,  0.1067]]],


        [[[-0.0462, -0.0732, -0.0339, -0.0569,  0.0344],
          [ 0.0217, -0.0960, -0.1135, -0.0820,  0.0353],
          [ 0.0226,  0.0066,  0.0191,  0.0522,  0.0986],
          [-0.0946,  0.0257, -0.0353,  0.0866, -0.0039],
          [ 0.0354, -0.0436, -0.0857,  0.0711,  0.0421]],

         [[-0.0715,  0.0448, -0.0478,  0.0755, -0.0578],
          [ 0.1127, -0.0526, -0.1046,  0.0710, -0.0189],
          [-0.0079,  0.1007, -0.0464, -0.0967,  0.0137],
          [ 0.0587, -0.0915,  0.0212,  0.0212,  0.0259],
          [ 0.0572,  0.0283,  0.0013,  0.1119, -0.0051]],

         [[ 0.0268,  0.0909, -0.0809, -0.0398,  0.1101],
          [ 0.1020,  0.0993,  0.0176, -0.0125, -0.0770],
          [ 0.0786, -0.1061,  0.0761, -0.0004,  0.0296],
          [-0.0290, -0.0337,  0.0569, -0.0355, -0.0754],
          [ 0.0760, -0.0028,  0.0969, -0.0407,  0.0886]]],


        [[[ 0.0132, -0.0045, -0.0614,  0.0071, -0.1035],
          [ 0.0208,  0.0518, -0.1080, -0.0129, -0.0524],
          [-0.0328,  0.0093, -0.0668,  0.0273,  0.0307],
          [ 0.0311,  0.0798,  0.0596, -0.0256, -0.0979],
          [-0.0542, -0.0553, -0.0613, -0.0782, -0.0579]],

         [[-0.0875, -0.0210,  0.0966, -0.0768, -0.0592],
          [ 0.0897,  0.0828,  0.0343,  0.0133, -0.0669],
          [-0.0986, -0.0866, -0.1075,  0.0613,  0.0486],
          [-0.1153,  0.1053,  0.0163, -0.0813, -0.0268],
          [-0.0267,  0.1103, -0.0289, -0.0212, -0.0427]],

         [[-0.0471, -0.0681,  0.0008,  0.0685, -0.0726],
          [-0.0161, -0.0378,  0.0142, -0.0448, -0.0542],
          [-0.0594, -0.1057, -0.0388,  0.0282, -0.0045],
          [-0.0942,  0.1069, -0.0601, -0.1092,  0.0658],
          [ 0.0058, -0.0673, -0.0139,  0.0544,  0.0266]]]])
conv1.bias 
 tensor([-0.0518,  0.0757, -0.0106,  0.0676, -0.0081,  0.0287])
conv2.weight 
 tensor([[[[ 3.7345e-02,  3.6917e-02,  7.1036e-04, -1.8674e-02, -2.0425e-02],
          [ 6.0888e-02,  3.9115e-02,  6.0703e-02, -4.9426e-02,  2.0759e-03],
          [ 4.4816e-02,  2.4605e-02,  7.5611e-02,  7.0319e-02, -4.8377e-03],
          [-5.8067e-02,  2.4481e-02, -7.9937e-02, -6.9166e-02,  4.6737e-02],
          [-1.4972e-02, -7.3878e-02, -7.6790e-02, -1.9032e-02, -1.9925e-02]],

         [[ 2.2720e-02, -2.5971e-02, -6.3477e-02,  1.7530e-02,  2.5905e-02],
          [-6.4675e-02, -6.6284e-02, -8.0954e-02,  3.6396e-02, -6.3034e-02],
          [-1.8645e-03,  4.3630e-02, -2.6132e-02, -7.6036e-02,  7.8512e-02],
          [ 4.3217e-02, -3.1471e-02, -9.8552e-03,  3.9861e-02, -2.3391e-02],
          [ 2.8649e-02,  5.9830e-02,  2.7637e-02,  1.8659e-02, -5.6407e-02]],

         [[-4.5381e-02,  5.4505e-03,  4.0909e-03,  5.6494e-02, -5.9140e-02],
          [ 2.3393e-02, -3.3347e-02, -6.6707e-02,  1.9643e-02, -2.0795e-02],
          [-7.7018e-02,  3.0887e-02, -1.8568e-02,  6.2216e-02,  1.9621e-02],
          [ 3.4469e-02, -8.1499e-03,  3.7170e-03, -4.2050e-02, -1.1584e-02],
          [ 1.5811e-02,  7.1654e-02, -6.3917e-02,  2.9590e-02,  2.9235e-02]],

         [[ 3.9611e-02, -2.3142e-02,  3.7166e-02, -3.2922e-02, -7.0504e-02],
          [-5.1643e-03,  7.2051e-02,  4.7072e-02,  4.3785e-02,  3.0908e-02],
          [ 4.5853e-02, -5.6516e-02, -7.1385e-02,  4.2695e-02,  2.6823e-02],
          [-3.3464e-03, -5.1895e-02, -5.9452e-02, -1.4120e-02, -4.2594e-02],
          [-5.9216e-03, -7.9317e-02,  3.2249e-02,  2.3419e-02,  1.3252e-02]],

         [[ 1.1583e-02,  7.0880e-02,  1.9811e-02,  8.0881e-02, -4.4006e-02],
          [ 3.5248e-02, -2.2885e-02,  2.7275e-02,  4.8847e-02, -2.0026e-02],
          [ 3.0439e-02,  7.2296e-02,  7.4953e-02,  4.2624e-02,  3.7546e-02],
          [ 6.0431e-02, -7.8233e-02, -2.1510e-02,  7.9771e-02,  2.0746e-02],
          [-6.6449e-02,  8.1094e-02,  1.7321e-03,  9.2274e-03,  4.6537e-02]],

         [[-4.3946e-02,  3.4462e-02,  4.2152e-02, -3.2618e-02, -3.1919e-02],
          [ 3.7446e-02, -1.6665e-02, -4.6682e-02, -4.2671e-02,  3.8549e-02],
          [ 2.0830e-02,  3.1240e-02,  1.4524e-02,  6.9791e-02,  6.4641e-02],
          [-5.1961e-02,  2.9224e-02, -8.1169e-02,  5.0892e-02,  7.3306e-02],
          [-5.3219e-02,  4.4223e-02,  1.6751e-02,  1.0149e-02,  2.7877e-02]]],


        [[[ 4.0394e-02, -7.5706e-02,  4.0839e-02,  4.1527e-02,  7.5264e-02],
          [ 5.0450e-02,  2.5265e-02,  5.6446e-02, -9.1349e-03, -2.0079e-02],
          [ 4.0942e-03,  1.9982e-02,  7.8088e-02,  1.3781e-02,  5.9151e-02],
          [ 1.5582e-03, -7.1641e-02, -3.9163e-02,  2.0775e-02,  6.9971e-04],
          [ 1.4160e-02, -2.1367e-02,  7.2005e-02,  3.0974e-02,  5.5173e-02]],

         [[ 2.6252e-02,  2.1418e-02, -7.9839e-03, -6.4778e-02, -1.7517e-02],
          [ 3.0320e-02, -7.2231e-03, -6.9592e-02, -2.1982e-02, -7.9040e-02],
          [-6.8035e-02, -4.2568e-02,  6.6144e-03,  3.2699e-02,  4.0103e-03],
          [-7.6126e-02,  6.6878e-02,  2.0500e-02,  7.3015e-02,  4.4364e-02],
          [ 6.4838e-02, -6.1012e-02, -3.7262e-02, -2.4985e-02,  4.3699e-02]],

         [[ 2.9414e-02, -6.4046e-02, -7.9652e-02,  5.7636e-02,  1.8768e-02],
          [ 1.3999e-02,  6.1029e-04, -2.5749e-02,  5.8838e-02, -7.4751e-02],
          [ 6.9579e-02,  4.0549e-02,  2.6570e-02,  1.9762e-02,  6.2969e-02],
          [-3.0153e-02,  4.1174e-02, -1.4508e-02, -7.5177e-02,  6.0151e-02],
          [ 3.5434e-02, -3.4538e-02,  9.2284e-04, -6.9311e-02,  5.5276e-02]],

         [[-1.1414e-02,  5.8701e-02, -8.5818e-03,  5.3662e-02, -6.6120e-02],
          [-6.6017e-02, -7.2396e-02,  3.9557e-02, -3.2917e-02,  2.3871e-02],
          [ 3.6651e-02, -5.7215e-02,  7.5414e-02, -2.8045e-02, -5.4822e-03],
          [ 6.6979e-03,  4.3628e-02, -7.9500e-02,  1.6581e-02, -2.0443e-03],
          [ 4.8128e-02, -2.5281e-03,  6.3143e-02,  5.5622e-02,  5.0823e-02]],

         [[ 2.6724e-02, -4.4536e-02,  3.8301e-02, -1.6428e-02,  7.6386e-03],
          [ 6.9414e-02,  6.4933e-02, -6.0955e-02,  4.2710e-02,  6.7609e-02],
          [ 3.2155e-02, -3.9059e-02, -8.1267e-02,  6.0943e-02,  7.8761e-02],
          [ 9.2058e-03, -6.8716e-02,  5.9567e-02, -4.7019e-02,  4.3247e-02],
          [-8.0727e-02, -7.7734e-02,  6.4857e-02,  1.4899e-03,  4.7743e-02]],

         [[ 4.5334e-02,  1.9591e-02,  4.8368e-02, -7.7771e-02, -2.2575e-02],
          [ 2.3952e-02, -7.0499e-02, -6.3645e-02, -1.8650e-02, -2.2908e-02],
          [ 4.0042e-02,  5.1475e-02, -4.9496e-02, -7.1370e-02, -7.3195e-02],
          [ 7.5049e-02, -5.0792e-02,  7.1668e-02,  2.3936e-02,  7.4133e-02],
          [ 7.1541e-03,  3.1869e-02, -7.6662e-02, -1.1101e-02, -6.0137e-02]]],


        [[[-3.1704e-02, -6.3101e-02,  5.3376e-02, -7.5087e-02,  7.4585e-02],
          [ 3.3090e-03, -1.2829e-02,  4.2248e-03, -3.8829e-02,  2.2651e-02],
          [-3.0851e-02,  6.0922e-02,  6.3309e-02, -1.0301e-02, -3.4664e-04],
          [ 3.7358e-03,  5.7330e-02,  7.6840e-02,  4.9586e-02, -5.9538e-02],
          [ 3.8326e-02, -6.2273e-02,  3.8199e-02,  9.8926e-03, -6.3158e-02]],

         [[ 3.4346e-02, -3.0442e-02,  9.5122e-03,  5.9389e-03,  6.4301e-02],
          [ 3.7045e-02, -1.4779e-02,  5.3899e-02,  1.2880e-02,  3.2471e-02],
          [-4.9418e-02, -7.3648e-02, -1.2379e-02,  6.0696e-02, -4.2189e-03],
          [-5.9245e-03, -2.1985e-02,  3.4781e-02,  6.4471e-02, -6.9246e-02],
          [-5.3104e-02,  4.7656e-02, -4.7472e-02,  5.0731e-02,  6.2965e-02]],

         [[-7.3520e-02, -2.9047e-02,  2.4189e-02,  9.0452e-03,  3.1695e-02],
          [-5.4734e-02,  2.8641e-02, -6.1692e-02, -2.3195e-02,  1.1079e-02],
          [ 5.4220e-02, -7.6615e-02,  5.6999e-02,  2.3648e-03, -4.8024e-02],
          [ 2.4688e-02,  2.6944e-02, -5.6788e-02,  4.2183e-02, -2.4088e-02],
          [ 5.4214e-03, -1.7575e-02,  6.7586e-02,  5.1225e-02,  2.3619e-02]],

         [[-2.5996e-03, -7.9755e-02,  6.8051e-02,  7.2919e-02,  7.9731e-02],
          [-5.1041e-02, -2.0511e-02,  4.2875e-02, -3.9107e-02,  7.6118e-02],
          [ 9.0300e-03,  3.5622e-02,  1.0749e-02, -1.6854e-02, -5.9495e-02],
          [ 3.1847e-02,  7.7569e-02,  3.8437e-02, -3.2095e-02,  8.4368e-03],
          [ 2.2344e-02,  5.6201e-02,  4.6436e-02, -6.0905e-02, -6.2908e-02]],

         [[ 1.1728e-02, -2.6272e-03, -3.3887e-02, -4.3492e-02,  1.4497e-02],
          [ 3.9038e-02, -6.3376e-02, -2.3013e-02, -4.0424e-02,  1.4657e-02],
          [ 4.9225e-03, -3.3368e-02,  1.8460e-02, -3.6485e-02, -6.4865e-02],
          [-6.4731e-05, -6.8108e-02, -1.3410e-02, -3.4235e-02, -6.4206e-02],
          [-1.7974e-02,  3.3237e-02,  5.5293e-02, -3.7493e-02,  2.4666e-02]],

         [[ 6.7512e-02, -8.9689e-03, -2.2933e-02,  1.6010e-02,  6.9638e-02],
          [-3.7105e-02,  6.7179e-02, -1.0214e-02, -4.8984e-02, -4.1328e-02],
          [-8.6841e-03, -1.7692e-02, -1.4424e-02,  7.3771e-02,  1.2215e-02],
          [-7.9845e-02,  6.7047e-02,  3.1997e-02, -5.1304e-02,  6.0482e-02],
          [-1.4893e-02, -2.7676e-02,  5.4934e-02,  6.3990e-02,  2.7662e-02]]],


        ...,


        [[[ 1.0199e-02, -5.2322e-02, -3.7206e-02, -1.4544e-02, -4.6219e-02],
          [-1.2879e-02,  4.7828e-02,  7.8953e-02, -1.3771e-02,  2.3383e-02],
          [-2.0076e-02, -2.9035e-02,  5.4802e-02,  3.0305e-02, -2.6655e-02],
          [ 4.6331e-02,  6.4619e-02,  3.6669e-02, -5.7059e-02, -1.2394e-02],
          [-4.7441e-02,  3.9712e-02, -6.8904e-02, -7.4229e-02, -5.4093e-03]],

         [[-3.7258e-02,  8.1076e-02, -7.6125e-02, -2.9282e-02,  1.0107e-02],
          [ 2.3341e-02,  2.7966e-02,  4.9232e-02,  4.2246e-02, -2.2956e-02],
          [ 6.1198e-02,  5.5776e-02, -6.1231e-02, -7.3973e-02, -1.8083e-02],
          [ 2.4045e-02,  5.3863e-02,  3.1777e-02, -3.6017e-02,  7.9472e-02],
          [-6.9948e-02,  6.3699e-02,  4.3250e-02,  4.6033e-02,  8.9336e-04]],

         [[ 2.5100e-02,  5.6652e-03, -3.4297e-02, -4.4208e-02, -3.4073e-03],
          [ 1.6430e-02, -5.2721e-02,  3.6158e-02,  7.7301e-02,  4.7632e-03],
          [ 7.6485e-02, -4.3372e-03,  4.2569e-02, -3.6177e-02, -2.1329e-03],
          [-8.4518e-03, -9.9161e-03, -2.1800e-02, -6.9410e-02,  5.5372e-02],
          [ 7.8497e-02,  6.9532e-02, -1.9760e-03, -4.9298e-02, -3.0007e-02]],

         [[ 5.7926e-02, -1.7255e-02,  5.0323e-02,  7.3531e-03,  7.3418e-02],
          [ 5.8616e-02,  3.4145e-02,  6.8296e-02, -8.6052e-03, -1.9639e-02],
          [-7.3903e-03,  3.6604e-02, -6.6443e-02,  4.2403e-02, -1.6865e-02],
          [-4.4089e-02, -3.2403e-02, -3.5313e-02,  6.2766e-02,  4.4323e-02],
          [-7.7840e-02, -5.1307e-02,  3.1470e-02,  2.4083e-02, -3.1171e-02]],

         [[-7.2114e-02,  4.7854e-02, -3.7292e-02,  3.6088e-02, -1.2283e-02],
          [ 2.4217e-02,  4.2792e-02,  4.0946e-02, -4.8699e-02, -2.0729e-02],
          [ 4.8880e-02, -8.1478e-02, -7.3582e-02, -2.0240e-02,  5.9753e-02],
          [ 3.2898e-02, -1.7339e-02, -5.6911e-02,  6.5161e-02,  1.7858e-02],
          [ 3.9480e-02, -3.2163e-02,  3.1307e-02, -5.5870e-02, -1.6457e-02]],

         [[ 2.2531e-02, -8.0621e-02,  6.8040e-02, -5.3297e-02,  6.8076e-02],
          [-2.8019e-02, -4.2122e-02,  9.4667e-03,  1.1930e-02,  5.3379e-02],
          [ 7.0529e-03, -7.5513e-02, -5.8059e-03, -2.2628e-02, -3.1337e-02],
          [ 4.6129e-02,  1.0021e-02,  1.3801e-02,  5.4890e-02,  8.2099e-03],
          [-3.4219e-02, -3.5771e-02,  4.9384e-02,  4.2971e-03,  1.0185e-02]]],


        [[[ 1.5722e-02, -1.1761e-02, -5.5889e-02, -4.0064e-02,  4.6955e-02],
          [ 3.8323e-02,  1.4971e-02,  4.7523e-02, -7.3487e-02, -6.6887e-02],
          [ 5.4269e-02,  6.5048e-03, -2.5124e-02,  7.0294e-02,  2.8026e-02],
          [-7.3427e-02, -6.3417e-02, -6.9835e-02,  5.1313e-02, -3.0339e-02],
          [-3.7274e-02, -6.8501e-02,  1.3327e-02,  7.0011e-03,  7.3245e-02]],

         [[ 3.7582e-02,  8.0081e-02, -4.8837e-02,  7.3556e-03,  6.0067e-02],
          [-3.2691e-02, -4.5259e-02, -6.4128e-02, -5.0167e-02, -2.3580e-02],
          [-5.0516e-02, -7.3288e-02, -5.1057e-02, -4.8879e-02,  1.4947e-02],
          [ 5.5937e-02,  2.8608e-02, -5.1257e-02, -9.8808e-03,  7.7368e-03],
          [-6.1867e-03,  5.5593e-02, -6.4040e-02, -1.9817e-02,  4.8827e-02]],

         [[ 7.0229e-02,  3.4918e-02,  3.4894e-02, -3.7108e-02,  5.1772e-02],
          [-8.0578e-02, -1.9076e-02, -5.2325e-02, -3.1958e-02, -1.4594e-02],
          [-6.5627e-02, -4.4570e-02,  7.8603e-02, -1.8659e-02, -4.3064e-02],
          [-5.8323e-02,  2.8146e-02,  2.7508e-02, -2.0674e-03, -2.3774e-02],
          [ 5.3830e-02,  3.1012e-03, -1.3121e-02,  4.2424e-02,  1.3136e-03]],

         [[ 6.2128e-02, -8.1062e-02,  5.5346e-03,  4.9415e-02,  1.9914e-02],
          [ 4.5311e-02,  4.5756e-02, -1.6908e-02,  1.4688e-02,  3.0094e-02],
          [-4.5764e-03, -3.1909e-02,  6.0014e-02, -6.2276e-02, -3.4248e-02],
          [ 3.9257e-02,  1.9695e-02, -8.7933e-03,  7.9642e-02, -3.3138e-02],
          [-1.9051e-02,  4.8750e-02, -4.9173e-03,  7.9091e-02,  1.4798e-02]],

         [[-5.5164e-02,  3.3881e-02, -3.4420e-04,  6.3908e-02,  7.2785e-02],
          [ 3.9900e-02,  6.5517e-03, -8.0552e-02, -1.5329e-02, -5.7844e-02],
          [-6.8076e-03,  5.0193e-02,  7.4728e-02, -2.9026e-02,  2.0508e-02],
          [ 6.2877e-02,  8.0455e-02,  5.1410e-02, -3.8253e-02, -6.9830e-02],
          [-1.4001e-02, -6.4940e-02,  4.3407e-03, -2.0574e-03, -6.3191e-02]],

         [[ 1.8252e-02,  8.8192e-03,  1.6308e-02,  8.0085e-02,  5.4516e-02],
          [ 3.2235e-02, -1.4626e-02, -5.4716e-02,  7.6015e-02, -5.5979e-02],
          [ 3.0786e-02,  3.1961e-02, -1.8162e-02,  6.9846e-02,  3.8360e-02],
          [ 7.4391e-02,  1.7810e-02,  5.8638e-02, -2.3435e-02, -5.3262e-02],
          [ 7.7326e-02, -2.3349e-02, -7.6374e-02, -1.5019e-02, -4.1779e-02]]],


        [[[-4.0523e-02, -1.3673e-02,  1.7464e-02, -8.0872e-02,  1.3032e-02],
          [-5.7647e-02,  7.6154e-02,  2.9756e-02,  6.1005e-02, -6.2796e-02],
          [-1.8865e-02, -3.1772e-02, -3.0434e-02, -6.4698e-02, -7.2306e-02],
          [-1.3506e-02,  7.4316e-02,  3.0436e-02, -1.4313e-02,  7.2392e-02],
          [-3.5933e-02,  1.1374e-02, -1.8395e-02, -3.0843e-02,  5.5321e-02]],

         [[ 1.4394e-02,  3.6295e-02,  4.9757e-02, -4.6856e-03,  9.4505e-03],
          [-5.9689e-02,  5.7720e-02,  7.4232e-02,  7.1351e-02,  3.7225e-02],
          [ 3.1824e-02, -3.5381e-02, -3.4969e-02,  8.2402e-03, -2.4470e-02],
          [-7.7093e-02,  4.4498e-02,  4.7004e-02, -7.2011e-02, -3.8237e-02],
          [-6.7163e-02, -1.7111e-02,  5.9214e-02,  7.7507e-02, -4.4913e-02]],

         [[-8.0340e-02,  1.9304e-02,  1.0000e-03,  4.0900e-03, -6.6380e-02],
          [-6.6810e-02, -6.3729e-03,  2.0123e-02, -4.3774e-02, -3.4981e-02],
          [-6.8541e-02,  1.6689e-02, -7.1792e-02,  1.5246e-02, -6.8854e-04],
          [ 4.3843e-02,  2.6870e-02, -3.8101e-04, -7.8826e-02,  6.5015e-02],
          [-2.0440e-02,  7.9386e-02, -3.8219e-03, -2.4580e-02,  4.0634e-02]],

         [[ 5.5544e-02, -3.7836e-02, -7.9338e-02,  7.0619e-02, -3.3446e-02],
          [-3.2902e-03, -4.8389e-02,  4.7453e-02,  5.8127e-02, -4.8315e-03],
          [-5.6379e-02, -5.8292e-02, -3.9011e-02, -4.0794e-03,  4.6537e-02],
          [ 8.0529e-02,  6.2578e-02,  2.7042e-02, -7.3989e-02,  8.9902e-03],
          [-3.2401e-02, -6.5050e-02,  6.9688e-02, -6.6803e-02,  3.6991e-02]],

         [[-2.2901e-02,  4.9715e-02, -5.3222e-02, -7.6049e-02, -6.6969e-02],
          [ 9.8178e-03, -6.9489e-02, -3.7907e-02,  3.8849e-02, -3.6555e-02],
          [-4.1870e-02,  1.7604e-02,  1.3747e-02,  1.2642e-02, -2.0011e-02],
          [-6.3959e-02,  6.5200e-02, -3.1475e-02, -5.4463e-02, -7.0515e-02],
          [-3.3264e-02,  5.0797e-04,  4.7155e-02,  5.2026e-02, -6.2295e-02]],

         [[-7.0623e-02, -3.8786e-02, -1.6983e-02, -4.0680e-02, -3.5289e-03],
          [ 6.6456e-02, -5.6733e-02, -3.2044e-02, -8.0967e-02,  6.8003e-02],
          [ 7.7967e-02, -1.8929e-02,  4.1282e-02, -4.3998e-02,  7.8460e-02],
          [ 4.9829e-02, -3.3562e-02, -7.5281e-02,  5.1357e-02,  2.8261e-02],
          [ 6.7955e-02, -3.4526e-02, -4.7810e-02,  7.8384e-02, -1.2523e-02]]]])
conv2.bias 
 tensor([-0.0807, -0.0406, -0.0171,  0.0340,  0.0777,  0.0155,  0.0788, -0.0089,
        -0.0693, -0.0276,  0.0578,  0.0165,  0.0525, -0.0673, -0.0171,  0.0804])
fc1.weight 
 tensor([[-0.0032, -0.0052, -0.0235,  ..., -0.0029, -0.0208,  0.0015],
        [-0.0460,  0.0396,  0.0050,  ...,  0.0194,  0.0410,  0.0174],
        [-0.0391, -0.0378, -0.0265,  ...,  0.0376,  0.0157,  0.0217],
        ...,
        [-0.0304, -0.0122,  0.0140,  ..., -0.0028,  0.0002, -0.0292],
        [ 0.0346,  0.0187,  0.0185,  ..., -0.0256,  0.0434, -0.0359],
        [-0.0116,  0.0302,  0.0412,  ...,  0.0163, -0.0479, -0.0163]])
fc1.bias 
 tensor([-0.0453,  0.0006,  0.0223,  0.0062, -0.0188,  0.0133,  0.0158, -0.0061,
         0.0145,  0.0035, -0.0145, -0.0451,  0.0352,  0.0302, -0.0086,  0.0333,
         0.0025,  0.0102,  0.0128, -0.0002, -0.0021,  0.0439,  0.0468,  0.0120,
        -0.0089,  0.0115, -0.0260,  0.0178,  0.0217,  0.0456, -0.0116,  0.0224,
        -0.0207, -0.0060, -0.0386,  0.0405, -0.0180, -0.0116, -0.0009,  0.0239,
        -0.0203, -0.0064,  0.0406, -0.0173,  0.0378,  0.0180,  0.0195,  0.0145,
        -0.0435,  0.0157,  0.0363, -0.0170,  0.0064, -0.0107,  0.0341, -0.0227,
         0.0179, -0.0221, -0.0264, -0.0018, -0.0104, -0.0481, -0.0343, -0.0245,
        -0.0190, -0.0425,  0.0329, -0.0488,  0.0279,  0.0034, -0.0491, -0.0422,
         0.0237,  0.0211, -0.0260, -0.0380,  0.0010, -0.0025, -0.0367,  0.0386,
        -0.0090,  0.0157, -0.0292, -0.0302,  0.0423, -0.0476, -0.0401,  0.0195,
         0.0315,  0.0285,  0.0456,  0.0297,  0.0091, -0.0291,  0.0301, -0.0231,
        -0.0055,  0.0359,  0.0164, -0.0297,  0.0243, -0.0429,  0.0311,  0.0072,
        -0.0400, -0.0473,  0.0470, -0.0068, -0.0160,  0.0092, -0.0327, -0.0040,
         0.0245,  0.0178, -0.0249, -0.0146,  0.0401,  0.0019,  0.0216, -0.0497])
fc2.weight 
 tensor([[ 0.0327, -0.0668,  0.0384,  ..., -0.0704,  0.0015, -0.0560],
        [-0.0657, -0.0632,  0.0364,  ...,  0.0721,  0.0100,  0.0768],
        [ 0.0130,  0.0838, -0.0002,  ...,  0.0178,  0.0226, -0.0903],
        ...,
        [ 0.0313, -0.0549,  0.0724,  ...,  0.0874, -0.0439, -0.0327],
        [ 0.0371, -0.0318,  0.0875,  ...,  0.0631, -0.0813,  0.0321],
        [-0.0071,  0.0583,  0.0542,  ..., -0.0285,  0.0750,  0.0572]])
fc2.bias 
 tensor([ 0.0134, -0.0475,  0.0059,  0.0127,  0.0480, -0.0361,  0.0095,  0.0558,
         0.0322,  0.0769,  0.0570, -0.0611, -0.0341, -0.0588, -0.0095,  0.0799,
        -0.0563,  0.0562, -0.0833, -0.0296,  0.0359, -0.0193, -0.0529, -0.0428,
        -0.0597, -0.0521,  0.0202, -0.0128, -0.0195,  0.0684, -0.0416,  0.0795,
         0.0608,  0.0296, -0.0304, -0.0381, -0.0834,  0.0112,  0.0332,  0.0466,
        -0.0837, -0.0375,  0.0573, -0.0380,  0.0802, -0.0668,  0.0514, -0.0070,
         0.0033,  0.0871, -0.0178,  0.0522, -0.0571,  0.0472,  0.0561,  0.0215,
         0.0012, -0.0419, -0.0055, -0.0174, -0.0204, -0.0619, -0.0016, -0.0784,
        -0.0819, -0.0562, -0.0807,  0.0765,  0.0316,  0.0423, -0.0063, -0.0210,
         0.0315,  0.0806,  0.0109, -0.0175,  0.0705, -0.0466,  0.0097,  0.0098,
         0.0798,  0.0796, -0.0783, -0.0730])
fc3.weight 
 tensor([[-0.0351,  0.0782,  0.0364, -0.0547,  0.0099, -0.0188, -0.0582, -0.0056,
         -0.1074,  0.0905, -0.0629,  0.1084, -0.0456, -0.0382,  0.0472, -0.0674,
         -0.0414, -0.0523, -0.0009, -0.0946, -0.0463,  0.0898,  0.0631, -0.0910,
          0.0091, -0.0383, -0.0311,  0.0978,  0.0204, -0.1040, -0.0793, -0.1019,
          0.0631, -0.0136, -0.0283, -0.0260, -0.1050,  0.0029, -0.1016, -0.0253,
         -0.0350,  0.0862,  0.0013,  0.0482, -0.0284,  0.0073,  0.0053,  0.0703,
          0.0078, -0.0487,  0.0899, -0.0934,  0.0385, -0.0764,  0.0070, -0.0486,
          0.0197,  0.0372, -0.0763,  0.0477, -0.1010, -0.0224, -0.0491, -0.0548,
         -0.0461,  0.0629,  0.0691,  0.0064, -0.0637, -0.0727,  0.0074, -0.0961,
         -0.0429,  0.0362,  0.0140,  0.0659, -0.0640,  0.0336,  0.1020, -0.0572,
          0.0779,  0.0741, -0.0588, -0.0856],
        [ 0.0477, -0.0715,  0.0228,  0.0505,  0.0311, -0.0720,  0.0702,  0.0573,
          0.0241,  0.0151, -0.1036, -0.0023,  0.0145,  0.0096, -0.0538,  0.0805,
          0.0951,  0.1079, -0.0928,  0.0317, -0.0034, -0.0983,  0.0781,  0.0109,
         -0.0001, -0.0881,  0.0208,  0.0635,  0.0434,  0.1080, -0.0625, -0.0924,
          0.0947,  0.0438,  0.0596, -0.0324, -0.0555, -0.0729,  0.0614, -0.0416,
         -0.0770, -0.0748, -0.0193, -0.0991, -0.0900,  0.0201,  0.0715, -0.0605,
          0.0033, -0.0418,  0.0473, -0.1069,  0.0095, -0.0540,  0.0695,  0.1042,
          0.0245,  0.0715,  0.1073,  0.0643, -0.0956, -0.0843,  0.0391,  0.0033,
          0.0787, -0.1019, -0.0056, -0.0830,  0.0327,  0.0031, -0.0049, -0.0971,
          0.0549, -0.0827, -0.0933, -0.0676, -0.0909, -0.0925,  0.0300,  0.0154,
          0.0706, -0.0158, -0.0479, -0.0853],
        [-0.0502, -0.0625,  0.0947, -0.0247,  0.0150, -0.0101,  0.0695,  0.0064,
          0.0390, -0.0735, -0.0347,  0.0255, -0.0964,  0.0016, -0.0568,  0.1050,
         -0.0972,  0.0176,  0.0756, -0.0781,  0.0529, -0.0751,  0.0823,  0.0953,
          0.0651, -0.0740, -0.0460,  0.0549, -0.0199, -0.0245, -0.1058,  0.0528,
         -0.1070,  0.0086,  0.0665, -0.0895,  0.0798, -0.0902,  0.1072,  0.0173,
          0.0257,  0.0481, -0.0289,  0.0291, -0.1057, -0.0429, -0.1033, -0.0716,
         -0.0690,  0.0628,  0.0718, -0.1040, -0.1042,  0.0831,  0.0844,  0.1073,
          0.0846,  0.0477,  0.0619,  0.0479, -0.0872, -0.0647,  0.0556,  0.0539,
         -0.0421, -0.0697,  0.0625,  0.0688,  0.0778,  0.0239, -0.0431,  0.0142,
          0.0107,  0.0207,  0.0443, -0.0164,  0.0764, -0.0577, -0.0278, -0.0256,
          0.0416, -0.0133, -0.0185,  0.0236],
        [ 0.1058,  0.0798,  0.0002,  0.0795,  0.1071, -0.0654,  0.0125,  0.0800,
         -0.0396, -0.0300,  0.0885,  0.0236, -0.0025, -0.0938, -0.0494,  0.0230,
          0.0239, -0.0673, -0.0466,  0.0425,  0.0175,  0.0395,  0.1038,  0.0651,
         -0.0052,  0.0480, -0.0311, -0.0081,  0.0582,  0.0096,  0.0950,  0.0721,
         -0.0648,  0.0789, -0.0936,  0.0235, -0.0654, -0.0109,  0.0745,  0.0503,
          0.0175,  0.0719,  0.1069, -0.0279, -0.0692, -0.0972,  0.0624, -0.0348,
          0.0703,  0.0675,  0.0817, -0.0586,  0.0760, -0.0307, -0.0205, -0.0042,
          0.0939,  0.0528, -0.0242, -0.0632, -0.0184, -0.0781,  0.0133,  0.0246,
          0.0962, -0.0454,  0.0117, -0.0783,  0.0378,  0.0393,  0.0874,  0.0969,
         -0.1045,  0.0167, -0.0041, -0.0450, -0.0738, -0.0291, -0.0480,  0.1086,
          0.0284, -0.0981,  0.0933,  0.0159],
        [-0.1059,  0.0177, -0.0834,  0.0280, -0.0464,  0.0020,  0.0380,  0.1060,
          0.0865, -0.0857, -0.0820, -0.0471,  0.0236, -0.0603,  0.0667,  0.0142,
          0.0905, -0.0847, -0.0254, -0.0391, -0.0173, -0.1018, -0.0318,  0.0223,
         -0.0592,  0.0776,  0.0137, -0.0570, -0.0139,  0.0646, -0.0869, -0.0624,
         -0.0882,  0.0348,  0.0161, -0.0336,  0.0755,  0.1023, -0.1000, -0.0267,
         -0.1080,  0.0422, -0.0045,  0.0512, -0.0861, -0.0749,  0.1039, -0.0755,
          0.0404,  0.0974, -0.0386, -0.0557,  0.0769, -0.0065,  0.0711,  0.0475,
          0.0997,  0.0045,  0.0312, -0.0187, -0.1067, -0.0217,  0.0200, -0.0657,
          0.0338,  0.0409,  0.0502,  0.0765, -0.0575,  0.0739, -0.0023, -0.0798,
          0.0923,  0.0901, -0.0271,  0.0934,  0.0941, -0.0731,  0.0918,  0.0247,
         -0.0655, -0.0685, -0.0875,  0.0479],
        [-0.0871,  0.0840, -0.0604, -0.0193, -0.0877, -0.0734,  0.0127, -0.0357,
          0.0891, -0.0521, -0.0469, -0.0877, -0.0775,  0.0123, -0.0194, -0.1078,
          0.0492, -0.1044, -0.0714,  0.0606,  0.0053, -0.0227,  0.0231, -0.0439,
         -0.0554,  0.0112, -0.0717,  0.0829, -0.0140,  0.0908, -0.0258,  0.0669,
         -0.0290, -0.0555,  0.1082, -0.0481, -0.0175, -0.0107, -0.0297, -0.0472,
         -0.0606, -0.0301, -0.0846, -0.0839,  0.0077,  0.0267,  0.0926, -0.0119,
         -0.0195,  0.0077,  0.0336,  0.0981,  0.0040, -0.0447, -0.0131, -0.1037,
         -0.1037,  0.0380,  0.0343,  0.1038,  0.0378, -0.0402, -0.0581, -0.0523,
          0.0604,  0.0402, -0.0838, -0.0718, -0.0488,  0.0242,  0.0727, -0.0429,
          0.0225, -0.0209,  0.0964,  0.0810, -0.0704,  0.0720, -0.0120,  0.0704,
         -0.0779,  0.0169, -0.0885,  0.0133],
        [ 0.0809, -0.0432, -0.0358,  0.0023, -0.0809, -0.0058, -0.0977,  0.0731,
          0.0576, -0.0883,  0.0758,  0.0168, -0.0107, -0.0284,  0.0640,  0.0921,
         -0.1068, -0.0934,  0.0853,  0.0940, -0.0152,  0.0899,  0.0431,  0.0047,
         -0.0164, -0.0339,  0.0470,  0.0021, -0.0960,  0.1058,  0.0646, -0.0491,
         -0.1037,  0.0242,  0.0083,  0.0221, -0.0810, -0.0367, -0.0478, -0.0849,
         -0.0836,  0.0972, -0.0792, -0.1085, -0.0072, -0.0512, -0.0573, -0.0067,
          0.0944,  0.0517,  0.0925,  0.0974,  0.0239,  0.1011, -0.1045, -0.0396,
          0.0565, -0.0717, -0.0250, -0.0896,  0.0717,  0.0155, -0.0818, -0.0084,
         -0.0022, -0.1046, -0.0607, -0.0657, -0.0968,  0.0873, -0.0598, -0.0593,
          0.0558, -0.1064,  0.0574,  0.1044, -0.0233, -0.0505,  0.1041,  0.1085,
         -0.0538, -0.0595,  0.0956,  0.0604],
        [ 0.1015, -0.0722,  0.0488,  0.0101,  0.0202, -0.0593,  0.0623, -0.0953,
          0.0665,  0.1086, -0.1021,  0.0690,  0.0366, -0.0012,  0.0754,  0.0651,
          0.0005,  0.0642,  0.0149,  0.0153, -0.0449, -0.0533, -0.0103, -0.0387,
         -0.0602,  0.1004,  0.0283,  0.0476,  0.0596,  0.0504,  0.0366, -0.0626,
         -0.0361, -0.0263, -0.1079,  0.0652, -0.1043,  0.0658, -0.1018,  0.0300,
         -0.0766, -0.0371, -0.0634, -0.0091, -0.0625,  0.0402, -0.0438, -0.0524,
         -0.0687,  0.0636,  0.0641,  0.0817, -0.1090,  0.0409, -0.0558,  0.1062,
          0.0414, -0.0629,  0.0131, -0.0563,  0.0378,  0.0774, -0.0750,  0.0777,
          0.0578, -0.0936,  0.0269, -0.0396, -0.0964, -0.0894, -0.0125, -0.0236,
          0.0014, -0.0819, -0.0124,  0.0247,  0.1023, -0.0884,  0.0624, -0.0268,
          0.0912,  0.0312, -0.0989, -0.0456],
        [-0.0516, -0.0043, -0.0102,  0.0299,  0.0665, -0.0009,  0.1026, -0.0169,
          0.0006,  0.0715,  0.1036,  0.0087,  0.0707, -0.0396, -0.0476, -0.0896,
          0.0608, -0.0180, -0.0367, -0.0653,  0.0630,  0.1080,  0.0807, -0.0254,
         -0.0631, -0.0055,  0.0611,  0.0971,  0.0037,  0.0689,  0.0861,  0.0770,
          0.0932,  0.0045, -0.0973, -0.0262, -0.0513, -0.0835, -0.0036,  0.0355,
         -0.0524, -0.0042,  0.0043,  0.0252,  0.0437, -0.0898, -0.0324,  0.0382,
          0.0322,  0.0678,  0.0735,  0.0450, -0.0094, -0.0311,  0.0541, -0.0942,
          0.0945,  0.0062,  0.0225,  0.0156, -0.0396,  0.0228,  0.0718, -0.0720,
         -0.1054,  0.0230,  0.0869,  0.0996,  0.0456,  0.0548,  0.0861,  0.0787,
          0.0159,  0.0857, -0.0790, -0.0252,  0.0639,  0.0059,  0.0513,  0.0966,
         -0.0251, -0.0521, -0.0341, -0.0758],
        [-0.0203, -0.0479, -0.0209, -0.0015, -0.0537, -0.0816, -0.0397,  0.0425,
         -0.0706, -0.0772,  0.0870,  0.0587, -0.0714,  0.0805, -0.0104,  0.0030,
         -0.0112, -0.0341,  0.0635, -0.0202,  0.0839, -0.0273,  0.0969, -0.1013,
         -0.0227,  0.0159, -0.0036,  0.0428, -0.0474,  0.0933, -0.0499,  0.0124,
         -0.0043,  0.0435,  0.0785, -0.0419,  0.0231,  0.1041, -0.0128, -0.0573,
          0.0456,  0.0235,  0.0565, -0.0868, -0.1073, -0.0534,  0.0074, -0.0648,
         -0.0178, -0.0929,  0.1065,  0.0231, -0.0270, -0.0733,  0.0656,  0.0857,
         -0.0941,  0.0018, -0.0426, -0.0115,  0.0978, -0.0780, -0.0622,  0.0838,
         -0.0606, -0.0661, -0.0331,  0.0457, -0.0763, -0.0903, -0.0381, -0.0198,
         -0.0369, -0.0135, -0.1033,  0.1031,  0.0206,  0.0382, -0.1005, -0.0188,
         -0.0711, -0.0649,  0.0517, -0.0060]])
fc3.bias 
 tensor([ 0.0853, -0.1027, -0.0893, -0.0900,  0.0732,  0.0718, -0.0705,  0.0435,
         0.0994,  0.0521])

Process finished with exit code 0
發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章