Commit b53ae26
Author: Umer Farooq
Commit message: Add files via upload
1 parent: de9443e
34 files changed, +9,626 -0 lines
Binary file not shown.

Deep Learning and Neural Network/week2/week2-Logistic+Regression+with+a+Neural+Network+mindset.ipynb
+1,397 lines (large diff, not rendered by default)

Deep Learning and Neural Network/week2/week2-Python+Basics+With+Numpy.ipynb
+1,198 lines (large diff, not rendered by default)
New file (path not rendered in this view):
@@ -0,0 +1,66 @@

import matplotlib.pyplot as plt
import numpy as np
import sklearn
import sklearn.datasets
import sklearn.linear_model

def plot_decision_boundary(model, X, y):
    # Set min and max values and give it some padding
    x_min, x_max = X[0, :].min() - 1, X[0, :].max() + 1
    y_min, y_max = X[1, :].min() - 1, X[1, :].max() + 1
    h = 0.01
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid
    Z = model(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.ylabel('x2')
    plt.xlabel('x1')
    plt.scatter(X[0, :], X[1, :], c=y, cmap=plt.cm.Spectral)

def sigmoid(x):
    """
    Compute the sigmoid of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- sigmoid(x)
    """
    s = 1/(1+np.exp(-x))
    return s

def load_planar_dataset():
    np.random.seed(1)
    m = 400  # number of examples
    N = int(m/2)  # number of points per class
    D = 2  # dimensionality
    X = np.zeros((m, D))  # data matrix where each row is a single example
    Y = np.zeros((m, 1), dtype='uint8')  # labels vector (0 for red, 1 for blue)
    a = 4  # maximum radius of the flower

    for j in range(2):
        ix = range(N*j, N*(j+1))
        t = np.linspace(j*3.12, (j+1)*3.12, N) + np.random.randn(N)*0.2  # theta
        r = a*np.sin(4*t) + np.random.randn(N)*0.2  # radius
        X[ix] = np.c_[r*np.sin(t), r*np.cos(t)]
        Y[ix] = j

    X = X.T
    Y = Y.T

    return X, Y

def load_extra_datasets():
    N = 200
    noisy_circles = sklearn.datasets.make_circles(n_samples=N, factor=.5, noise=.3)
    noisy_moons = sklearn.datasets.make_moons(n_samples=N, noise=.2)
    blobs = sklearn.datasets.make_blobs(n_samples=N, random_state=5, n_features=2, centers=6)
    gaussian_quantiles = sklearn.datasets.make_gaussian_quantiles(mean=None, cov=0.5, n_samples=N, n_features=2, n_classes=2, shuffle=True, random_state=None)
    no_structure = np.random.rand(N, 2), np.random.rand(N, 2)

    return noisy_circles, noisy_moons, blobs, gaussian_quantiles, no_structure
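A minimal usage sketch for the helpers above, assuming the standard course workflow (illustrative only, not part of this commit; LogisticRegressionCV is just one scikit-learn model that fits plot_decision_boundary's interface):

# Illustrative usage, not part of the commit: fit a linear classifier on the
# flower dataset and draw its (linear) decision boundary.
X, Y = load_planar_dataset()                 # X: (2, 400), Y: (1, 400)
clf = sklearn.linear_model.LogisticRegressionCV()
clf.fit(X.T, Y.ravel())                      # sklearn expects (n_samples, n_features)

# plot_decision_boundary expects a callable mapping (n_points, 2) -> labels.
plot_decision_boundary(lambda x: clf.predict(x), X, Y.ravel())
plt.title("Logistic Regression")
plt.show()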
New file (path not rendered in this view):
@@ -0,0 +1,119 @@

import numpy as np

def layer_sizes_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(5, 3)
    Y_assess = np.random.randn(2, 3)
    return X_assess, Y_assess

def initialize_parameters_test_case():
    n_x, n_h, n_y = 2, 4, 1
    return n_x, n_h, n_y

def forward_propagation_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)

    parameters = {'W1': np.array([[-0.00416758, -0.00056267],
                                  [-0.02136196,  0.01640271],
                                  [-0.01793436, -0.00841747],
                                  [ 0.00502881, -0.01245288]]),
                  'W2': np.array([[-0.01057952, -0.00909008,  0.00551454,  0.02292208]]),
                  'b1': np.array([[ 0.],
                                  [ 0.],
                                  [ 0.],
                                  [ 0.]]),
                  'b2': np.array([[ 0.]])}

    return X_assess, parameters

def compute_cost_test_case():
    np.random.seed(1)
    Y_assess = np.random.randn(1, 3)
    parameters = {'W1': np.array([[-0.00416758, -0.00056267],
                                  [-0.02136196,  0.01640271],
                                  [-0.01793436, -0.00841747],
                                  [ 0.00502881, -0.01245288]]),
                  'W2': np.array([[-0.01057952, -0.00909008,  0.00551454,  0.02292208]]),
                  'b1': np.array([[ 0.],
                                  [ 0.],
                                  [ 0.],
                                  [ 0.]]),
                  'b2': np.array([[ 0.]])}

    a2 = np.array([[ 0.5002307 ,  0.49985831,  0.50023963]])

    return a2, Y_assess, parameters

def backward_propagation_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)
    Y_assess = np.random.randn(1, 3)
    parameters = {'W1': np.array([[-0.00416758, -0.00056267],
                                  [-0.02136196,  0.01640271],
                                  [-0.01793436, -0.00841747],
                                  [ 0.00502881, -0.01245288]]),
                  'W2': np.array([[-0.01057952, -0.00909008,  0.00551454,  0.02292208]]),
                  'b1': np.array([[ 0.],
                                  [ 0.],
                                  [ 0.],
                                  [ 0.]]),
                  'b2': np.array([[ 0.]])}

    cache = {'A1': np.array([[-0.00616578,  0.0020626 ,  0.00349619],
                             [-0.05225116,  0.02725659, -0.02646251],
                             [-0.02009721,  0.0036869 ,  0.02883756],
                             [ 0.02152675, -0.01385234,  0.02599885]]),
             'A2': np.array([[ 0.5002307 ,  0.49985831,  0.50023963]]),
             'Z1': np.array([[-0.00616586,  0.0020626 ,  0.0034962 ],
                             [-0.05229879,  0.02726335, -0.02646869],
                             [-0.02009991,  0.00368692,  0.02884556],
                             [ 0.02153007, -0.01385322,  0.02600471]]),
             'Z2': np.array([[ 0.00092281, -0.00056678,  0.00095853]])}
    return parameters, cache, X_assess, Y_assess

def update_parameters_test_case():
    parameters = {'W1': np.array([[-0.00615039,  0.0169021 ],
                                  [-0.02311792,  0.03137121],
                                  [-0.0169217 , -0.01752545],
                                  [ 0.00935436, -0.05018221]]),
                  'W2': np.array([[-0.0104319 , -0.04019007,  0.01607211,  0.04440255]]),
                  'b1': np.array([[ -8.97523455e-07],
                                  [  8.15562092e-06],
                                  [  6.04810633e-07],
                                  [ -2.54560700e-06]]),
                  'b2': np.array([[  9.14954378e-05]])}

    grads = {'dW1': np.array([[ 0.00023322, -0.00205423],
                              [ 0.00082222, -0.00700776],
                              [-0.00031831,  0.0028636 ],
                              [-0.00092857,  0.00809933]]),
             'dW2': np.array([[ -1.75740039e-05,   3.70231337e-03,  -1.25683095e-03,  -2.55715317e-03]]),
             'db1': np.array([[  1.05570087e-07],
                              [ -3.81814487e-06],
                              [ -1.90155145e-07],
                              [  5.46467802e-07]]),
             'db2': np.array([[ -1.08923140e-05]])}
    return parameters, grads

def nn_model_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)
    Y_assess = np.random.randn(1, 3)
    return X_assess, Y_assess

def predict_test_case():
    np.random.seed(1)
    X_assess = np.random.randn(2, 3)
    parameters = {'W1': np.array([[-0.00615039,  0.0169021 ],
                                  [-0.02311792,  0.03137121],
                                  [-0.0169217 , -0.01752545],
                                  [ 0.00935436, -0.05018221]]),
                  'W2': np.array([[-0.0104319 , -0.04019007,  0.01607211,  0.04440255]]),
                  'b1': np.array([[ -8.97523455e-07],
                                  [  8.15562092e-06],
                                  [  6.04810633e-07],
                                  [ -2.54560700e-06]]),
                  'b2': np.array([[  9.14954378e-05]])}
    return parameters, X_assess
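These fixtures feed the functions students implement in the week-3 notebook (not rendered in this diff). As a hedged sketch of how one is consumed, forward_propagation below is a stand-in following the course convention of a tanh hidden layer and sigmoid output; it is not defined anywhere in this commit:

# Illustrative only, not part of the commit: a stand-in forward pass.
def forward_propagation(X, parameters):
    Z1 = np.dot(parameters['W1'], X) + parameters['b1']   # (4, 3)
    A1 = np.tanh(Z1)
    Z2 = np.dot(parameters['W2'], A1) + parameters['b2']  # (1, 3)
    A2 = 1 / (1 + np.exp(-Z2))
    return A2, {'Z1': Z1, 'A1': A1, 'Z2': Z2, 'A2': A2}

X_assess, parameters = forward_propagation_test_case()
A2, cache = forward_propagation(X_assess, parameters)
assert A2.shape == (1, X_assess.shape[1])   # one probability per example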

Deep Learning and Neural Network/week3/week3-Planar+data+classification+with+one+hidden+layer.ipynb
+1,628 lines (large diff, not rendered by default)
New file (path not rendered in this view):
@@ -0,0 +1,82 @@

import numpy as np

def sigmoid(Z):
    """
    Implements the sigmoid activation in numpy

    Arguments:
    Z -- numpy array of any shape

    Returns:
    A -- output of sigmoid(Z), same shape as Z
    cache -- returns Z as well, useful during backpropagation
    """

    A = 1/(1+np.exp(-Z))
    cache = Z

    return A, cache

def relu(Z):
    """
    Implement the RELU function.

    Arguments:
    Z -- Output of the linear layer, of any shape

    Returns:
    A -- Post-activation parameter, of the same shape as Z
    cache -- returns Z, stored for computing the backward pass efficiently
    """

    A = np.maximum(0, Z)

    assert(A.shape == Z.shape)

    cache = Z
    return A, cache

def relu_backward(dA, cache):
    """
    Implement the backward propagation for a single RELU unit.

    Arguments:
    dA -- post-activation gradient, of any shape
    cache -- 'Z' stored during the forward pass, for computing backward propagation efficiently

    Returns:
    dZ -- Gradient of the cost with respect to Z
    """

    Z = cache
    dZ = np.array(dA, copy=True)  # just converting dA to a correct object

    # When Z <= 0, set dZ to 0 as well.
    dZ[Z <= 0] = 0

    assert (dZ.shape == Z.shape)

    return dZ

def sigmoid_backward(dA, cache):
    """
    Implement the backward propagation for a single SIGMOID unit.

    Arguments:
    dA -- post-activation gradient, of any shape
    cache -- 'Z' stored during the forward pass, for computing backward propagation efficiently

    Returns:
    dZ -- Gradient of the cost with respect to Z
    """

    Z = cache

    s = 1/(1+np.exp(-Z))
    dZ = dA * s * (1-s)

    assert (dZ.shape == Z.shape)

    return dZ
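A quick numeric sanity check for these activation pairs (illustrative, not part of this commit): each backward function should agree with a finite-difference estimate of the derivative, and relu_backward should zero the gradient wherever Z <= 0.

# Illustrative check, not part of the commit.
Z = np.array([[-1.5, 0.0, 2.0]])
dA = np.ones_like(Z)                          # pretend upstream gradient of ones

A_sig, cache = sigmoid(Z)
dZ_sig = sigmoid_backward(dA, cache)          # analytic: s * (1 - s)

eps = 1e-6
numeric = (sigmoid(Z + eps)[0] - sigmoid(Z - eps)[0]) / (2 * eps)
assert np.allclose(dZ_sig, numeric, atol=1e-8)

A_rel, cache = relu(Z)
dZ_rel = relu_backward(dA, cache)
assert np.array_equal(dZ_rel, np.array([[0., 0., 1.]]))   # zeroed where Z <= 0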
New file (path not rendered in this view):
@@ -0,0 +1,104 @@

import numpy as np

def linear_forward_test_case():
    X = np.array([[-1.02387576,  1.12397796],
                  [-1.62328545,  0.64667545],
                  [-1.74314104, -0.59664964]])
    W = np.array([[ 0.74505627,  1.97611078, -1.24412333]])
    b = 5

    return X, W, b

def linear_activation_forward_test_case():
    X = np.array([[-1.02387576,  1.12397796],
                  [-1.62328545,  0.64667545],
                  [-1.74314104, -0.59664964]])
    W = np.array([[ 0.74505627,  1.97611078, -1.24412333]])
    b = 5
    return X, W, b

def L_model_forward_test_case():
    X = np.array([[-1.02387576,  1.12397796],
                  [-1.62328545,  0.64667545],
                  [-1.74314104, -0.59664964]])
    parameters = {'W1': np.array([[ 1.62434536, -0.61175641, -0.52817175],
                                  [-1.07296862,  0.86540763, -2.3015387 ]]),
                  'W2': np.array([[ 1.74481176, -0.7612069 ]]),
                  'b1': np.array([[ 0.],
                                  [ 0.]]),
                  'b2': np.array([[ 0.]])}
    return X, parameters

def compute_cost_test_case():
    Y = np.asarray([[1, 1, 1]])
    aL = np.array([[.8, .9, 0.4]])

    return Y, aL

def linear_backward_test_case():
    z, linear_cache = (np.array([[ 3.1980455 ,  7.85763489]]),
                       (np.array([[-1.02387576,  1.12397796],
                                  [-1.62328545,  0.64667545],
                                  [-1.74314104, -0.59664964]]),
                        np.array([[ 0.74505627,  1.97611078, -1.24412333]]),
                        5))

    return z, linear_cache

def linear_activation_backward_test_case():
    aL, linear_activation_cache = (np.array([[ 3.1980455 ,  7.85763489]]),
                                   ((np.array([[-1.02387576,  1.12397796],
                                               [-1.62328545,  0.64667545],
                                               [-1.74314104, -0.59664964]]),
                                     np.array([[ 0.74505627,  1.97611078, -1.24412333]]),
                                     5),
                                    np.array([[ 3.1980455 ,  7.85763489]])))

    return aL, linear_activation_cache

def L_model_backward_test_case():
    X = np.random.rand(3, 2)
    Y = np.array([[1, 1]])
    parameters = {'W1': np.array([[ 1.78862847,  0.43650985,  0.09649747]]), 'b1': np.array([[ 0.]])}

    aL, caches = (np.array([[ 0.60298372,  0.87182628]]),
                  [((np.array([[ 0.20445225,  0.87811744],
                               [ 0.02738759,  0.67046751],
                               [ 0.4173048 ,  0.55868983]]),
                     np.array([[ 1.78862847,  0.43650985,  0.09649747]]),
                     np.array([[ 0.]])),
                    np.array([[ 0.41791293,  1.91720367]]))])

    return X, Y, aL, caches

def update_parameters_test_case():
    parameters = {'W1': np.array([[ 1.78862847,  0.43650985,  0.09649747],
                                  [-1.8634927 , -0.2773882 , -0.35475898],
                                  [-0.08274148, -0.62700068, -0.04381817],
                                  [-0.47721803, -1.31386475,  0.88462238]]),
                  'W2': np.array([[ 0.88131804,  1.70957306,  0.05003364, -0.40467741],
                                  [-0.54535995, -1.54647732,  0.98236743, -1.10106763],
                                  [-1.18504653, -0.2056499 ,  1.48614836,  0.23671627]]),
                  'W3': np.array([[-1.02378514, -0.7129932 ,  0.62524497],
                                  [-0.16051336, -0.76883635, -0.23003072]]),
                  'b1': np.array([[ 0.],
                                  [ 0.],
                                  [ 0.],
                                  [ 0.]]),
                  'b2': np.array([[ 0.],
                                  [ 0.],
                                  [ 0.]]),
                  'b3': np.array([[ 0.],
                                  [ 0.]])}
    grads = {'dW1': np.array([[ 0.63070583,  0.66482653,  0.18308507],
                              [ 0.        ,  0.        ,  0.        ],
                              [ 0.        ,  0.        ,  0.        ],
                              [ 0.        ,  0.        ,  0.        ]]),
             'dW2': np.array([[ 1.62934255,  0.        ,  0.        ,  0.        ],
                              [ 0.        ,  0.        ,  0.        ,  0.        ],
                              [ 0.        ,  0.        ,  0.        ,  0.        ]]),
             'dW3': np.array([[-1.40260776,  0.        ,  0.        ]]),
             'da1': np.array([[ 0.70760786,  0.65063504],
                              [ 0.17268975,  0.15878569],
                              [ 0.03817582,  0.03510211]]),
             'da2': np.array([[ 0.39561478,  0.36376198],
                              [ 0.7674101 ,  0.70562233],
                              [ 0.0224596 ,  0.02065127],
                              [-0.18165561, -0.16702967]]),
             'da3': np.array([[ 0.44888991,  0.41274769],
                              [ 0.31261975,  0.28744927],
                              [-0.27414557, -0.25207283]]),
             'db1': 0.75937676204411464,
             'db2': 0.86163759922811056,
             'db3': -0.84161956022334572}
    return parameters, grads
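update_parameters_test_case pairs parameters with grads so the notebook's update step can be checked. As a sketch of the rule it exercises (vanilla gradient descent; the learning rate here is arbitrary and not from this commit):

# Illustrative only: one gradient-descent step, W := W - lr * dW, b := b - lr * db.
parameters, grads = update_parameters_test_case()
learning_rate = 0.1

updated = {}
for l in (1, 2, 3):
    updated['W' + str(l)] = parameters['W' + str(l)] - learning_rate * grads['dW' + str(l)]
    updated['b' + str(l)] = parameters['b' + str(l)] - learning_rate * grads['db' + str(l)]

# e.g. W1[0, 0] moves from 1.78862847 to 1.78862847 - 0.1 * 0.63070583, i.e. about 1.72555789
print(updated['W1'][0, 0])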
