changeset 80:3c7927f59b05

notes to self re deep neural networks
author Jeff Hammel <k0scist@gmail.com>
date Sun, 17 Dec 2017 13:43:42 -0800
parents cecea2334eef
children 990a27e125f2
files tvii/deep.py
diffstat 1 files changed, 44 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tvii/deep.py	Sun Dec 17 13:43:42 2017 -0800
@@ -0,0 +1,44 @@
+"""
+Deep neural networks
+
+Forward propagation for layer `l`
+
+Input: a[l-1]
+
+Output: a[l], cache (z[l]; w[l], b[l])
+
+z[l] = w[l] a[l-1] + b[l]
+a[l] = g[l](z[l])
+
+---
+
+Backward propagation for layer `l`:
+
+Input: da[l]
+Output: da[l-1], dW[l], db[l]
+
+dz[l] = da[l]* g[l]'(z[l])
+dw[l] = dz[l] a[l-1].T
+db[l] = dz[l]
+da[l-1] = w[l].T dz[l]
+
+dz[l] = w[l+1].T dz[l+1] * g[l]' ( z[l] )
+
+=>
+
+dZ[l] = dA[l] * g[l]' ( Z[l] )
+
+dW[l] = (1/m) dZ[l] A[l-1].T
+
+db[l] = (1/m) np.sum(dZ[l], axis=1, keepdims=True)
+dA[l-1] = W[l].T * dZ[l]
+
+
+For the final layer:
+da[l] = - (y/a) + (1 - y)/(1-a)
+dA[l] = (-(y(1)/a(1)) + (1 - y(1))/(1 - a(1)),  # first training example
+        ...)
+
+
+The weight matrix for layer `l`, W[l] is
+of the shape (n[l], n[l-1])
+"""