diff tvii/logistic_regression.py @ 28:77f68c241b37

[logistic regression] propagate
author Jeff Hammel <k0scist@gmail.com>
date Mon, 04 Sep 2017 11:53:23 -0700
parents c52d8173b056
children ae0c345ea09d
line diff
--- a/tvii/logistic_regression.py	Mon Sep 04 11:38:46 2017 -0700
+++ b/tvii/logistic_regression.py	Mon Sep 04 11:53:23 2017 -0700
@@ -43,12 +43,28 @@
     - Write your code step by step for the propagation. np.log(), np.dot()
     """
 
-    m = X.shape[1]
+
 
+    # FORWARD PROPAGATION (FROM X TO COST)
     cost = cost_function(w, b, X, Y)  # compute cost
 
-    A = sigmoid(w.T*X + b)  # compute activation
-    raise NotImplementedError('TODO')
+    # BACKWARD PROPAGATION (TO FIND GRADIENT)
+    m = X.shape[1]
+    A = sigmoid(np.dot(w.T, X) + b)  # compute activation
+    dw = (1./m)*np.dot(X, (A - Y).T)
+    db = (1./m)*np.sum(A - Y)
+
+    # sanity check
+    assert(A.shape[1] == m)
+    assert(dw.shape == w.shape), "dw.shape is {}; w.shape is {}".format(dw.shape, w.shape)
+    assert(db.dtype == float)
+    cost = np.squeeze(cost)
+    assert(cost.shape == ())
+
+    # return gradients
+    grads = {"dw": dw,
+             "db": db}
+    return grads, cost
 
 
 def cost_function(w, b, X, Y):
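
For readers who want to try the new propagate step outside the repo, here is a minimal standalone sketch. The sigmoid and cost_function helpers live elsewhere in tvii/logistic_regression.py and are not shown in this hunk, so the versions below are assumptions: sigmoid is the standard logistic function, and cost_function is guessed as the cross-entropy cost suggested by the docstring's np.log()/np.dot() hint.

import numpy as np

def sigmoid(z):
    # assumed helper: standard logistic activation
    return 1. / (1. + np.exp(-z))

def cost_function(w, b, X, Y):
    # assumed helper: cross-entropy cost (guess based on the np.log()/np.dot() hint)
    m = X.shape[1]
    A = sigmoid(np.dot(w.T, X) + b)
    return (-1. / m) * np.sum(Y * np.log(A) + (1 - Y) * np.log(1 - A))

# toy data: 2 features (rows of X), 3 examples (columns of X)
w = np.zeros((2, 1))
b = 0.
X = np.array([[1., 2., -1.],
              [3., 4., -3.2]])
Y = np.array([[1., 0., 1.]])

# the same forward/backward steps as the patched propagate()
m = X.shape[1]
A = sigmoid(np.dot(w.T, X) + b)       # forward: activations
dw = (1. / m) * np.dot(X, (A - Y).T)  # backward: gradient of cost w.r.t. w
db = (1. / m) * np.sum(A - Y)         # backward: gradient of cost w.r.t. b
cost = np.squeeze(cost_function(w, b, X, Y))

print("dw =", dw.ravel(), "db =", db, "cost =", cost)

With zero weights every activation is sigmoid(0) = 0.5, so the printed cost should be log(2) ≈ 0.693, a quick sanity check that the pieces fit together.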