"""----------------------------------------------------------------
logistic_loss plotting routines and support
"""
from matplotlib import cm
from lab_utils_week3 import sigmoid, dlblue, dlorange, np, plt, compute_cost_matrix

def compute_cost_logistic_sq_err(X, y, w, b):
    """
    compute squared error cost on logistic data (a negative example for demonstration; not used in practice)
    Args:
      X (ndarray): Shape (m,n) matrix of examples with multiple features
      y (ndarray): Shape (m,)  target values
      w (ndarray): Shape (n,)  parameters for prediction
      b (scalar):              parameter for prediction
    Returns:
      cost (scalar): cost
    """
    m = X.shape[0]
    cost = 0.0
    for i in range(m):
        z_i = np.dot(X[i], w) + b
        f_wb_i = sigmoid(z_i)   # add sigmoid to the normal squared-error cost for linear regression
        cost = cost + (f_wb_i - y[i])**2
    cost = cost / (2 * m)
    return np.squeeze(cost)
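
# The function above implements J(w,b) = (1/2m) * sum_i (sigmoid(x_i . w + b) - y_i)^2.
# A minimal usage sketch (hypothetical 1-D tumor data; `x_train` and `y_train`
# are illustrative names, not defined in this module):
#
#   x_train = np.array([0., 1., 2., 3., 4., 5.])
#   y_train = np.array([0., 0., 0., 1., 1., 1.])
#   cost = compute_cost_logistic_sq_err(x_train.reshape(-1, 1), y_train,
#                                       w=np.array([1.0]), b=-3.0)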

def plt_logistic_squared_error(X, y):
    """ plots logistic squared error for demonstration """
    wx, by = np.meshgrid(np.linspace(-6, 12, 50),
                         np.linspace(10, -20, 40))
    points = np.c_[wx.ravel(), by.ravel()]
    cost = np.zeros(points.shape[0])

    for i in range(points.shape[0]):
        w, b = points[i]
        cost[i] = compute_cost_logistic_sq_err(X.reshape(-1, 1), y, w, b)
    cost = cost.reshape(wx.shape)

    fig = plt.figure()
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False
    ax = fig.add_subplot(1, 1, 1, projection='3d')
    ax.plot_surface(wx, by, cost, alpha=0.6, cmap=cm.jet)
    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel("Cost", rotation=90, fontsize=16)
    ax.set_title('"Logistic" Squared Error Cost vs (w, b)')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
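
# Usage sketch (same hypothetical `x_train`/`y_train` as above). The resulting
# surface is non-convex, which is why squared error is not used with logistic
# regression:
#
#   plt_logistic_squared_error(x_train, y_train)
#   plt.show()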

def plt_logistic_cost(X, y):
    """ plots logistic cost """
    wx, by = np.meshgrid(np.linspace(-6, 12, 50),
                         np.linspace(0, -20, 40))
    points = np.c_[wx.ravel(), by.ravel()]
    cost = np.zeros(points.shape[0], dtype=np.longdouble)

    for i in range(points.shape[0]):
        w, b = points[i]
        cost[i] = compute_cost_matrix(X.reshape(-1, 1), y, w, b, logistic=True, safe=True)
    cost = cost.reshape(wx.shape)

    fig = plt.figure(figsize=(9, 5))
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    ax = fig.add_subplot(1, 2, 1, projection='3d')
    ax.plot_surface(wx, by, cost, alpha=0.6, cmap=cm.jet)
    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel("Cost", rotation=90, fontsize=16)
    ax.set_title('Logistic Cost vs (w, b)')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))

    ax = fig.add_subplot(1, 2, 2, projection='3d')
    ax.plot_surface(wx, by, np.log(cost), alpha=0.6, cmap=cm.jet)
    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel('\nlog(Cost)', fontsize=16)
    ax.set_title('log(Logistic Cost) vs (w, b)')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))

    plt.show()
    return cost
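
# Usage sketch (same hypothetical data as above). In contrast to the squared
# error surface, the logistic cost surface is convex; the log-scaled right
# panel makes the curvature near the minimum easier to see:
#
#   cost_surface = plt_logistic_cost(x_train, y_train)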

def soup_bowl():
    """ creates 3D quadratic error surface """
    # Create figure and plot with a 3D projection
    fig = plt.figure(figsize=(4, 4))
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    # Plot configuration
    ax = fig.add_subplot(111, projection='3d')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_rotate_label(False)
    ax.view_init(15, -120)

    # Useful linspaces to give values to the parameters w and b
    w = np.linspace(-20, 20, 100)
    b = np.linspace(-20, 20, 100)

    # Get the z value for a bowl-shaped cost function
    z = np.zeros((len(w), len(b)))
    j = 0
    for x in w:
        i = 0
        for y in b:
            z[i, j] = x**2 + y**2
            i += 1
        j += 1

    # Meshgrid used for plotting 3D functions
    W, B = np.meshgrid(w, b)

    # Create the 3D surface plot of the bowl-shaped cost function
    ax.plot_surface(W, B, z, cmap="Spectral_r", alpha=0.7, antialiased=False)
    ax.plot_wireframe(W, B, z, color='k', alpha=0.1)
    ax.set_xlabel("$w$")
    ax.set_ylabel("$b$")
    ax.set_zlabel("Cost", rotation=90)
    ax.set_title("Squared Error Cost used in Linear Regression")
    plt.show()
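
# Note: since W[i, j] = w[j] and B[i, j] = b[i] after np.meshgrid, the nested
# loop above computes the same surface as the vectorized form
#
#   W, B = np.meshgrid(w, b)
#   z = W**2 + B**2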

def plt_simple_example(x, y):
    """ plots tumor data """
    pos = y == 1
    neg = y == 0

    fig, ax = plt.subplots(1, 1, figsize=(5, 3))
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    ax.scatter(x[pos], y[pos], marker='x', s=80, c='red', label="malignant")
    ax.scatter(x[neg], y[neg], marker='o', s=100, label="benign", facecolors='none', edgecolors=dlblue, lw=3)
    ax.set_ylim(-0.075, 1.1)
    ax.set_ylabel('y')
    ax.set_xlabel('Tumor Size')
    ax.legend(loc='lower right')
    ax.set_title("Example of Logistic Regression on Categorical Data")
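
# Usage sketch: `plt_simple_example` expects a 1-D array of tumor sizes and a
# matching array of 0/1 labels (hypothetical values shown):
#
#   plt_simple_example(np.array([1., 2., 3., 4., 5., 6.]),
#                      np.array([0, 0, 0, 1, 1, 1]))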

def plt_two_logistic_loss_curves():
    """ plots the logistic loss """
    fig, ax = plt.subplots(1, 2, figsize=(6, 3), sharey=True)
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    x = np.linspace(0.01, 1 - 0.01, 20)
    ax[0].plot(x, -np.log(x))
    #ax[0].set_title("y = 1")
    ax[0].text(0.5, 4.0, "y = 1", fontsize=12)
    ax[0].set_ylabel("loss")
    ax[0].set_xlabel(r"$f_{w,b}(x)$")
    ax[1].plot(x, -np.log(1 - x))
    #ax[1].set_title("y = 0")
    ax[1].text(0.5, 4.0, "y = 0", fontsize=12)
    ax[1].set_xlabel(r"$f_{w,b}(x)$")

    ax[0].annotate("prediction \nmatches \ntarget ", xy=[1, 0], xycoords='data',
                   xytext=[-10, 30], textcoords='offset points', ha="right", va="center",
                   arrowprops={'arrowstyle': '->', 'color': dlorange, 'lw': 3})
    ax[0].annotate("loss increases as prediction\n differs from target", xy=[0.1, -np.log(0.1)], xycoords='data',
                   xytext=[10, 30], textcoords='offset points', ha="left", va="center",
                   arrowprops={'arrowstyle': '->', 'color': dlorange, 'lw': 3})
    ax[1].annotate("prediction \nmatches \ntarget ", xy=[0, 0], xycoords='data',
                   xytext=[10, 30], textcoords='offset points', ha="left", va="center",
                   arrowprops={'arrowstyle': '->', 'color': dlorange, 'lw': 3})
    ax[1].annotate("loss increases as prediction\n differs from target", xy=[0.9, -np.log(1 - 0.9)], xycoords='data',
                   xytext=[-10, 30], textcoords='offset points', ha="right", va="center",
                   arrowprops={'arrowstyle': '->', 'color': dlorange, 'lw': 3})

    plt.suptitle("Loss Curves for Two Categorical Target Values", fontsize=12)
    plt.tight_layout()
    plt.show()
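
# The two panels above plot the branches of the logistic loss,
#
#   loss(f_wb(x), y) = -log(f_wb(x))      if y = 1
#                      -log(1 - f_wb(x))  if y = 0
#
# evaluated over f_wb(x) in (0, 1); the endpoints are excluded (0.01 to 0.99)
# to avoid evaluating log(0).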