# Mish activation function: PyTorch and NumPy implementations
import torch
import numpy as np
import math
import matplotlib.pyplot as plt
# class Mish(torch.nn.Module):
#     def __init__(self):
#         super().__init__()
#
#     def forward(self, x):
#         x = x * torch.tanh(torch.nn.functional.softplus(x))
#         return x
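# A minimal usage sketch for the module above (kept commented, like the class
# itself; `act` is an illustrative name, and PyTorch >= 1.9 also ships an
# equivalent built-in, torch.nn.Mish):
# act = Mish()
# out = act(torch.randn(8))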
e = math.e
def tanh(x):
    # Hyperbolic tangent
    return (e ** x - e ** (-x)) / (e ** x + e ** (-x))
def softplus(x):
    # softplus can be seen as a smooth version of ReLU. According to research
    # by neuroscientists, softplus and ReLU resemble the firing-rate
    # (activation-frequency) function of biological neurons.
    return math.log(1 + pow(e, x))
def mish(x):
    # Mish(x) = x * tanh(softplus(x))
    return x * tanh(softplus(x))
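# An equivalent vectorized form (a sketch, not part of the original script;
# `mish_vec` is an illustrative name, np.log1p(np.exp(x)) is softplus written
# with the numerically safer log1p):
def mish_vec(x):
    return x * np.tanh(np.log1p(np.exp(x)))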
x = np.linspace(-5, 5, 1000)
y = np.empty_like(x)
# tanh/softplus above use scalar math functions, so apply mish element-wise
for i in range(1000):
    y[i] = mish(x[i])
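# Optional cross-check against PyTorch's built-in mish (a sketch; assumes
# PyTorch >= 1.9, where torch.nn.functional.mish was added):
assert np.allclose(y, torch.nn.functional.mish(torch.from_numpy(x)).numpy())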
plt.plot(x, y, color='orange', linewidth=2, label='Mish')
plt.legend()
plt.savefig('mish.jpg')
plt.show()