# activations.py
import matplotlib.pyplot as plt
import numpy as np


def compare(func0, func1):
    # Plot two activation functions on the same axes over [-5, 5).
    x_values = np.arange(-50, 50) / 10.0
    y0_values = func0(x_values)
    y1_values = func1(x_values)
    plt.ylim([-1.1, 1.1])
    plt.plot(x_values, y0_values, x_values, y1_values)


def perceptron(x):
    # Hard-threshold activation; np.sign maps x to -1, 0, or 1.
    return np.sign(x)


def tanh(x):
    return np.tanh(x)


def dtanh(y, dy):
    # Backward pass for tanh: given the output y = tanh(x) and the
    # upstream gradient dy, the local derivative is 1 - y**2.
    return (1 - y**2) * dy
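

# A minimal sketch, not part of the original script: verifying dtanh
# against a central finite difference. The helper name check_dtanh, the
# step size h, and the test points are illustrative choices.
def check_dtanh(h=1e-5):
    x = np.linspace(-2.0, 2.0, 9)
    y = np.tanh(x)
    analytic = dtanh(y, np.ones_like(x))  # (1 - tanh(x)**2) * 1
    numeric = (np.tanh(x + h) - np.tanh(x - h)) / (2 * h)
    assert np.allclose(analytic, numeric, atol=1e-8)
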
def relu(x):
    # Rectified linear unit: max(0, x), computed elementwise.
    return np.where(x > 0, x, 0)


def gelu(x):
    # Tanh approximation of GELU (Hendrycks & Gimpel, 2016).
    return x * 0.5 * (1 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * x**3)))
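

# A minimal sketch, not in the original file: the exact GELU is
# x * Phi(x), where Phi is the standard normal CDF, so it can be
# written with the error function. math.erf (standard library) is
# vectorized here to keep the comparison dependency-free.
def gelu_exact(x):
    import math
    erf = np.vectorize(math.erf)
    return x * 0.5 * (1 + erf(x / np.sqrt(2)))
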
def main():
    plt.rc('font', size=15)

    # Figure 1: hard threshold vs. tanh.
    plt.figure(figsize=(4, 4))
    compare(perceptron, tanh)
    plt.show()

    # Figure 2: ReLU on its own, with a wider y-range.
    plt.figure(figsize=(4, 4))
    x_values = np.arange(-50, 50) / 10.0
    plt.ylim(-5, 5)
    plt.plot(x_values, relu(x_values))
    plt.tight_layout()
    plt.show()

    # Figure 3: ReLU vs. GELU on the same axes.
    plt.figure(figsize=(4, 4))
    x_values = np.arange(-50, 50) / 10.0
    plt.ylim(-1, 5)
    plt.plot(x_values, relu(x_values), label='ReLU')
    plt.plot(x_values, gelu(x_values), label='GELU')
    plt.legend()
    plt.tight_layout()
    plt.show()


if __name__ == "__main__":
    main()