Python小练习:激活函数
作者:凯鲁嘎吉 - 博客园 http://www.cnblogs.com/kailugaji/
本文介绍几种常见的激活函数,并用Python来实现,包括:Sigmoid、tanh、ReLU、LeakyReLU、ELU、Swish、softmax。
1. 常见激活函数定义
2. activation_function_test.py
# -*- coding: utf-8 -*-
# Author: 凯鲁嘎吉 Coral Gajic
# https://www.cnblogs.com/kailugaji/
# Python exercise: activation functions.
# Implements common activations by hand and compares them against PyTorch's
# built-ins: 'sigmoid', 'tanh', 'relu', 'leakyrelu', 'elu', 'swish', 'softmax'.
'''
Partly based on:
https://zhuanlan.zhihu.com/p/397494815
'''
import numpy as np
import torch
import torch.nn.functional as F


def activation_function(index, x, gamma=None, dim=-1):
    """Compute an activation two ways: hand-written and via the PyTorch built-in.

    Parameters
    ----------
    index : str
        One of 'sigmoid', 'tanh', 'relu', 'leakyrelu', 'elu', 'swish',
        'softmax'.
    x : torch.Tensor
        Input tensor.
    gamma : float, optional
        Negative slope for 'leakyrelu' (defaults to 0.01, matching
        ``F.leaky_relu``) or alpha for 'elu' (defaults to 1.0, matching
        ``F.elu``). Ignored by the other activations.
    dim : int, optional
        Dimension along which 'softmax' normalizes (default: last).

    Returns
    -------
    (torch.Tensor, torch.Tensor)
        ``(y, z)`` — the hand-written result and the built-in result;
        the two should agree elementwise.

    Raises
    ------
    ValueError
        If ``index`` names an unknown activation.
    """
    if index == 'sigmoid':
        y = 1 / (1 + torch.exp(-x))
        z = torch.sigmoid(x)
    elif index == 'tanh':
        y = (torch.exp(x) - torch.exp(-x)) / (torch.exp(x) + torch.exp(-x))
        z = torch.tanh(x)
    elif index == 'relu':
        # torch.where keeps the computation inside torch (dtype/device
        # preserved) instead of the original np.where -> torch.tensor
        # round-trip through NumPy.
        y = torch.where(x >= 0, x, torch.zeros_like(x))
        z = F.relu(x)
    elif index == 'leakyrelu':
        if gamma is None:
            gamma = 0.01  # F.leaky_relu's documented default slope
        y = torch.where(x > 0, x, x * gamma)
        z = F.leaky_relu(x, gamma)
    elif index == 'elu':
        if gamma is None:
            gamma = 1.0  # F.elu's documented default alpha
        y = torch.where(x > 0, x, gamma * (torch.exp(x) - 1))
        z = F.elu(x, gamma)
    elif index == 'swish':
        y = x * (1 / (1 + torch.exp(-x)))
        z = x * torch.sigmoid(x)
    elif index == 'softmax':
        y = torch.exp(x) / torch.exp(x).sum(dim=dim, keepdim=True)
        z = F.softmax(x, dim=dim)
    else:
        # The original silently returned two uninitialized torch.empty([])
        # sentinels here; fail loudly instead.
        raise ValueError('unknown activation: %s' % index)
    return y, z


def main():
    """Print a numeric comparison for each activation, then plot them all."""
    # Imported lazily so activation_function() stays usable on systems
    # without a plotting backend.
    import matplotlib.pyplot as plt
    plt.rc('font', family='Times New Roman')

    torch.manual_seed(1)
    x = torch.randn(2, 3)  # random demo data
    print('原始数据:\n', x)
    # activation_function() parameter settings
    index = ['sigmoid', 'tanh', 'relu', 'leakyrelu', 'elu', 'swish', 'softmax']
    gamma = 0.1  # hyper-parameter for leakyrelu / elu
    num = 4      # decimal places kept when printing
    for idx in index:
        y, z = activation_function(idx, x, gamma)
        print('------------------------------------------')
        print('激活函数为:', idx)
        print('自己写的函数:\n', np.around(y, num))
        print('调用内置函数:\n', np.around(z, num))

    # ---------------------- plotting ----------------------
    # Fix axis ranges by hand.
    plt.xlim([-4, 4])
    plt.ylim([-1, 4])
    x = np.linspace(-4, 4, 100, endpoint=True)
    color = ['green', 'red', 'yellow', 'cyan',
             'orangered', 'dodgerblue', 'black']
    ls = ['-', '-', ':', ':', ':', '-', '-']
    for i in range(len(index)):
        _, z = activation_function(index[i], torch.tensor(x), gamma)
        if color[i] == 'yellow':
            # Thicker line so the light color stays visible.
            plt.plot(x, z.numpy(), color=color[i], ls=ls[i], lw=3, label=index[i])
        else:
            plt.plot(x, z.numpy(), color=color[i], ls=ls[i], label=index[i])
    # Reference lines y = 1 and x = 0.
    plt.plot([x.min(), x.max()], [1, 1], color='gray', ls='--', alpha=0.3)
    plt.plot([0, 0], [-1, 4], color='gray', ls='--', alpha=0.3)
    # Axis labels.
    plt.xlabel('x')
    plt.ylabel('f(x)')
    plt.legend(ncol=1, fontsize='small', facecolor='lavenderblush', edgecolor='black')
    plt.tight_layout()
    plt.savefig('Activation Functions.png', bbox_inches='tight', dpi=500)
    plt.show()


if __name__ == '__main__':
    main()
3. 结果
D:\ProgramData\Anaconda3\python.exe "D:/Python code/2023.3 exercise/Neural Network/activation_function_test.py" 原始数据: tensor([[ 0.6614, 0.2669, 0.0617], [ 0.6213, -0.4519, -0.1661]]) ------------------------------------------ 激活函数为: sigmoid 自己写的函数: tensor([[0.6596, 0.5663, 0.5154], [0.6505, 0.3889, 0.4586]]) 调用内置函数: tensor([[0.6596, 0.5663, 0.5154], [0.6505, 0.3889, 0.4586]]) ------------------------------------------ 激活函数为: tanh 自己写的函数: tensor([[ 0.5793, 0.2608, 0.0616], [ 0.5520, -0.4235, -0.1646]]) 调用内置函数: tensor([[ 0.5793, 0.2608, 0.0616], [ 0.5520, -0.4235, -0.1646]]) ------------------------------------------ 激活函数为: relu 自己写的函数: tensor([[0.6614, 0.2669, 0.0617], [0.6213, 0.0000, 0.0000]]) 调用内置函数: tensor([[0.6614, 0.2669, 0.0617], [0.6213, 0.0000, 0.0000]]) ------------------------------------------ 激活函数为: leakyrelu 自己写的函数: tensor([[ 0.6614, 0.2669, 0.0617], [ 0.6213, -0.0452, -0.0166]]) 调用内置函数: tensor([[ 0.6614, 0.2669, 0.0617], [ 0.6213, -0.0452, -0.0166]]) ------------------------------------------ 激活函数为: elu 自己写的函数: tensor([[ 0.6614, 0.2669, 0.0617], [ 0.6213, -0.0364, -0.0153]]) 调用内置函数: tensor([[ 0.6614, 0.2669, 0.0617], [ 0.6213, -0.0364, -0.0153]]) ------------------------------------------ 激活函数为: swish 自己写的函数: tensor([[ 0.4362, 0.1512, 0.0318], [ 0.4042, -0.1757, -0.0762]]) 调用内置函数: tensor([[ 0.4362, 0.1512, 0.0318], [ 0.4042, -0.1757, -0.0762]]) ------------------------------------------ 激活函数为: softmax 自己写的函数: tensor([[0.4498, 0.3032, 0.2470], [0.5565, 0.1903, 0.2532]]) 调用内置函数: tensor([[0.4498, 0.3032, 0.2470], [0.5565, 0.1903, 0.2532]]) Process finished with exit code 0
可以看到,自己写的激活函数与内置的结果一致。