1. Commonly used activation functions in PyTorch:
# ReLU activation  # Leaky ReLU activation  # Sigmoid activation  # Tanh activation  # Softmax activation  # Softplus activation
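As a quick reference, here is a minimal sketch (not part of the original post) that checks each nn module against its element-wise definition; the test tensor x and the use of assertions are arbitrary choices for illustration, and the 0.01 slope and beta=1 are simply the PyTorch defaults for LeakyReLU and Softplus.

import torch
import torch.nn as nn

x = torch.linspace(-5, 5, steps=11)

# ReLU(x) = max(0, x)
assert torch.allclose(nn.ReLU()(x), torch.clamp(x, min=0))
# LeakyReLU(x) = x if x >= 0 else 0.01 * x   (default negative_slope=0.01)
assert torch.allclose(nn.LeakyReLU()(x), torch.where(x >= 0, x, 0.01 * x))
# Sigmoid(x) = 1 / (1 + exp(-x))
assert torch.allclose(nn.Sigmoid()(x), 1 / (1 + torch.exp(-x)))
# Tanh(x) = (exp(x) - exp(-x)) / (exp(x) + exp(-x))
assert torch.allclose(nn.Tanh()(x), (torch.exp(x) - torch.exp(-x)) / (torch.exp(x) + torch.exp(-x)))
# Softmax(x)_i = exp(x_i) / sum_j exp(x_j), here over dim=0
assert torch.allclose(nn.Softmax(dim=0)(x), torch.exp(x) / torch.exp(x).sum())
# Softplus(x) = log(1 + exp(x))   (default beta=1)
assert torch.allclose(nn.Softplus()(x), torch.log(1 + torch.exp(x)))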
2. Code
import torch
import torch.nn as nn
from torch.utils.tensorboard import SummaryWriter

writer = SummaryWriter("logs")

# Input values from -10 to 9 as a float tensor
input = torch.arange(-10, 10, dtype=torch.float)

# ReLU activation
relu = nn.ReLU()
output_relu = relu(input)

# Leaky ReLU activation
LeakyReLU = nn.LeakyReLU()
output_LeakyReLU = LeakyReLU(input)

# Sigmoid activation
Sigmoid = nn.Sigmoid()
output_Sigmoid = Sigmoid(input)

# Tanh activation
Tanh = nn.Tanh()
output_Tanh = Tanh(input)

# Softmax activation (normalizes over dim=0)
Softmax = nn.Softmax(dim=0)
output_Softmax = Softmax(input)

# Softplus activation
Softplus = nn.Softplus()
output_Softplus = Softplus(input)

# Write each activation curve to TensorBoard, using the input value as the step
for i in range(len(input)):
    writer.add_scalar('Relu', output_relu[i], input[i])
    writer.add_scalar('LeakyReLU', output_LeakyReLU[i], input[i])
    writer.add_scalar('Sigmoid', output_Sigmoid[i], input[i])
    writer.add_scalar('Tanh', output_Tanh[i], input[i])
    writer.add_scalar('Softmax', output_Softmax[i], input[i])
    writer.add_scalar('Softplus', output_Softplus[i], input[i])
    # writer.add_scalars('run_14h', {'Relu': output_relu[i],
    #                                'LeakyReLU': output_LeakyReLU[i],
    #                                'Sigmoid': output_Sigmoid[i]}, input[i])

writer.close()
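To view the curves after running the script, start TensorBoard on the same log directory as the SummaryWriter("logs") call above and open the address it prints (by default http://localhost:6006):

tensorboard --logdir=logs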
3. Results: