Miscellaneous
- 1 Activation Functions
- 1.1 sigmoid
- 1.2 tanh
- 1.3 ReLU
- 1.4 Leaky ReLU
1 Activation Functions
Commonly used activation functions include sigmoid, tanh, ReLU, and Leaky ReLU.
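For reference, these are the functions and derivatives that the code below reproduces; a is the Leaky ReLU slope, 0.01 in the code:

\[
\begin{aligned}
\sigma(x) &= \frac{1}{1+e^{-x}}, & \sigma'(x) &= \sigma(x)\bigl(1-\sigma(x)\bigr),\\
\tanh(x) &= \frac{e^{x}-e^{-x}}{e^{x}+e^{-x}}, & \tanh'(x) &= 1-\tanh^{2}(x),\\
\operatorname{ReLU}(x) &= \max(0,x), & \operatorname{ReLU}'(x) &= \mathbf{1}[x>0],\\
\operatorname{LeakyReLU}_a(x) &= \max(ax,\,x), & \operatorname{LeakyReLU}_a'(x) &= \begin{cases}1, & x>0\\ a, & x\le 0\end{cases}
\end{aligned}
\]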
1.1 sigmoid
import torch
import numpy as np
import matplotlib.pyplot as plt
# sigmoid + tanh + ReLU + Leaky ReLU
## 1 sigmoid
### 1.1 Implement sigmoid from scratch
# (1) Functions
def sigmoid(x):
    return 1. / (1 + torch.exp(-x))

# Derivative: sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))
def sigmoid_diff(x):
    t = sigmoid(x)
    return t * (1 - t)

# (2) Check
x = torch.arange(3)
print(sigmoid(x))
print(sigmoid_diff(x))
print("#"*10)## 1.2 用torch检测sigmoid的导数
for i in range(3):x = torch.tensor([i],dtype=torch.float32,requires_grad=True)y = sigmoid(x)y.backward()print(x.grad.item(),end=", ")'''
tensor([0.5000, 0.7311, 0.8808])
tensor([0.2500, 0.1966, 0.1050])
##########
0.25, 0.1966119408607483, 0.10499356687068939
'''
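As an extra sanity check, not part of the original snippet, the hand-written sigmoid can also be compared against PyTorch's built-in torch.sigmoid. A minimal sketch, assuming the definitions above have already been run:

## 1.3 Optional: compare against the built-in torch.sigmoid
x = torch.linspace(-5, 5, steps=11)
print(torch.allclose(sigmoid(x), torch.sigmoid(x)))  # expected: True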
1.2 tanh
import torch
import numpy as np
import matplotlib.pyplot as plt
## 2 tanh
### 2.1 Implement tanh from scratch
# (1) Functions
def tanh(x):
    return 1. * (torch.exp(x) - torch.exp(-x)) / (torch.exp(x) + torch.exp(-x))

# Derivative: tanh'(x) = 1 - tanh(x)^2 = 4 * exp(2x) / (1 + exp(2x))^2
def tanh_diff(x):
    t = torch.exp(2 * x)
    return 4 * t / (1 + t) ** 2

# (2) Check
x = torch.arange(3)
print(tanh(x))
print(tanh_diff(x))
print("#" * 10)

## 2.2 Check the tanh derivative with torch autograd
for i in range(3):
    x = torch.tensor([i], dtype=torch.float32, requires_grad=True)
    y = tanh(x)
    y.backward()
    print(x.grad.item(), end=", ")
'''
tensor([0.0000, 0.7616, 0.9640])
tensor([1.0000, 0.4200, 0.0707])
##########
1.0, 0.41997432708740234, 0.07065081596374512
'''
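Similarly, and not in the original notes, the hand-written tanh and its derivative can be checked against the built-in torch.tanh and the identity tanh'(x) = 1 - tanh(x)^2. A small sketch, assuming the code above has been run:

## 2.3 Optional: compare against the built-in torch.tanh
x = torch.linspace(-3, 3, steps=13)
print(torch.allclose(tanh(x), torch.tanh(x)))                # expected: True
print(torch.allclose(tanh_diff(x), 1 - torch.tanh(x) ** 2))  # expected: True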
1.3 ReLU
import torch
import numpy as np
import matplotlib.pyplot as plt
## 3 ReLU
### 3.1 Implement ReLU from scratch
# (1) Functions
def Relu(x):
    return torch.maximum(torch.tensor(0), x)

# Derivative: 1 for x > 0, 0 otherwise
def Relu_diff(x):
    return torch.where(x > 0, 1, 0)

# (2) Check
x = torch.arange(-2, 4)
print(Relu(x))
print(Relu_diff(x))
print("#"*10)# 3.2 用torch检测Relu的导数
for i in range(-2,4):x = torch.tensor([i],dtype=torch.float32,requires_grad=True)y = Relu(x)y.backward()print(x.grad.item(),end=", ")
'''
tensor([0, 0, 0, 1, 2, 3])
tensor([0, 0, 0, 1, 1, 1])
##########
0.0, 0.0, 0.5, 1.0, 1.0, 1.0,
'''
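The 0.5 at x = 0 in the autograd output comes from torch.maximum, which, as the printed value suggests, splits the gradient evenly between its two arguments when they are equal; the built-in torch.nn.functional.relu instead uses a subgradient of 0 at x = 0. A small optional sketch, not part of the original notes, comparing the two (it assumes the Relu definition above has been run):

## 3.3 Optional: compare against the built-in torch.nn.functional.relu
import torch.nn.functional as F
x = torch.arange(-2., 4.)
print(torch.allclose(Relu(x), F.relu(x)))  # expected: True

# Subgradient of the built-in ReLU at exactly x = 0
x0 = torch.tensor([0.], requires_grad=True)
F.relu(x0).backward()
print(x0.grad.item())  # expected: 0.0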
1.4 Leaky ReLU
import torch
import numpy as np
import matplotlib.pyplot as plt
## 4 Leaky ReLU
### 4.1 Implement Leaky ReLU from scratch
# (1) Functions
def LeakRelu(x, a=0.01):
    # Leaky ReLU: x for x > 0, a * x otherwise
    return torch.where(x > 0, x, a * x)

# Derivative: 1 for x > 0, a (= 0.01) otherwise
def LeakRelu_diff(x):
    return torch.where(x > 0, 1, 0.01)

# (2) Check
x = torch.arange(3.)
print(LeakRelu(x))
print(LeakRelu_diff(x))
print("#"*10)## 4.2 用torch检测LeakRelu的导数
for i in range(3):x = torch.tensor([i],dtype=torch.float32,requires_grad=True)y = LeakRelu(x)y.backward()print(x.grad.item(),end=", ")'''
tensor([0.0100, 1.0000, 2.0000])
tensor([0.0100, 1.0000, 1.0000])
##########
0.0, 1.0, 1.0, '''
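As a final optional check, not part of the original notes, the hand-written version can be compared against the built-in torch.nn.functional.leaky_relu, whose default negative_slope is also 0.01. A minimal sketch, assuming the LeakRelu definition above has been run:

## 4.3 Optional: compare against the built-in torch.nn.functional.leaky_relu
import torch.nn.functional as F
x = torch.linspace(-3, 3, steps=13)
print(torch.allclose(LeakRelu(x), F.leaky_relu(x, negative_slope=0.01)))  # expected: True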