Commit 0dc040e ("update")
YunYang1994 authored and committed
1 parent fae836f

6 files changed: +1702 −0 lines changed

1-Introduction/activation.ipynb  +80
@@ -0,0 +1,80 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<Figure size 800x600 with 4 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "# fake data\n",
    "x = np.linspace(-5, 5, 100)\n",
    "\n",
    "# the following are popular activation functions\n",
    "y_relu = tf.nn.relu(x)\n",
    "y_sigmoid = tf.nn.sigmoid(x)\n",
    "y_tanh = tf.nn.tanh(x)\n",
    "y_softplus = tf.nn.softplus(x)\n",
    "# y_softmax = tf.nn.softmax(x)  # softmax is special: it maps the whole vector to a probability distribution\n",
    "\n",
    "# use matplotlib to visualize these activation functions\n",
    "plt.figure(1, figsize=(8, 6))\n",
    "plt.subplot(221)\n",
    "plt.plot(x, y_relu, c='red', label='relu')\n",
    "plt.ylim((-1, 5))\n",
    "plt.legend(loc='best')\n",
    "\n",
    "plt.subplot(222)\n",
    "plt.plot(x, y_sigmoid, c='red', label='sigmoid')\n",
    "plt.ylim((-0.2, 1.2))\n",
    "plt.legend(loc='best')\n",
    "\n",
    "plt.subplot(223)\n",
    "plt.plot(x, y_tanh, c='red', label='tanh')\n",
    "plt.ylim((-1.2, 1.2))\n",
    "plt.legend(loc='best')\n",
    "\n",
    "plt.subplot(224)\n",
    "plt.plot(x, y_softplus, c='red', label='softplus')\n",
    "plt.ylim((-0.2, 6))\n",
    "plt.legend(loc='best')\n",
    "\n",
    "plt.show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
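A note on the commented-out softmax line above: relu, sigmoid, tanh, and softplus all act elementwise, while softmax normalizes the whole vector into a probability distribution, which is why it is left out of the elementwise plots. A minimal sketch (not part of this commit, assuming TF 2.x eager execution) that checks the property:

import tensorflow as tf
import numpy as np

x = np.linspace(-5, 5, 100)

# softmax rescales the whole vector: every entry lies in (0, 1)
# and the entries sum to 1, i.e. a probability distribution
y_softmax = tf.nn.softmax(x)
print(float(tf.reduce_min(y_softmax)) > 0.0)  # True
print(float(tf.reduce_sum(y_softmax)))        # ~1.0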

1-Introduction/activation.py  +54
@@ -0,0 +1,54 @@
#! /usr/bin/env python
# coding=utf-8
#================================================================
#   Copyright (C) 2019 * Ltd. All rights reserved.
#
#   Editor      : VIM
#   File name   : activation.py
#   Author      : YunYang1994
#   Created date: 2019-03-08 22:05:51
#   Description :
#
#================================================================

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# fake data
x = np.linspace(-5, 5, 100)

# the following are popular activation functions
y_relu = tf.nn.relu(x)
y_sigmoid = tf.nn.sigmoid(x)
y_tanh = tf.nn.tanh(x)
y_softplus = tf.nn.softplus(x)
# y_softmax = tf.nn.softmax(x)  # softmax is special: it maps the whole vector to a probability distribution

# use matplotlib to visualize these activation functions
plt.figure(1, figsize=(8, 6))
plt.subplot(221)
plt.plot(x, y_relu, c='red', label='relu')
plt.ylim((-1, 5))
plt.legend(loc='best')

plt.subplot(222)
plt.plot(x, y_sigmoid, c='red', label='sigmoid')
plt.ylim((-0.2, 1.2))
plt.legend(loc='best')

plt.subplot(223)
plt.plot(x, y_tanh, c='red', label='tanh')
plt.ylim((-1.2, 1.2))
plt.legend(loc='best')

plt.subplot(224)
plt.plot(x, y_softplus, c='red', label='softplus')
plt.ylim((-0.2, 6))
plt.legend(loc='best')

plt.show()
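Note that activation.py feeds NumPy arrays straight into tf.nn.* and then hands the resulting tensors to matplotlib without an explicit conversion. Under TF 2.x this works because eager execution is on by default and matplotlib can coerce an EagerTensor to an array. A minimal sketch (assuming TF 2.x) that makes the conversion explicit via .numpy():

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(-5, 5, 100)
y_relu = tf.nn.relu(x)  # returns an EagerTensor under TF 2.x

# convert explicitly rather than relying on matplotlib's coercion
plt.plot(x, y_relu.numpy(), c='red', label='relu')
plt.legend(loc='best')
plt.show()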
