annotate tests/test_logistic_regression.py @ 64:2adeb95cf4d5

add more useful software + an idea
author Jeff Hammel <k0scist@gmail.com>
date Sun, 17 Dec 2017 12:41:26 -0800
parents 0f29b02f4806
children

#!/usr/bin/env python

"""
test logistic regression
"""

import numpy as np
import os
import unittest
from tvii import logistic_regression


class LogisticRegressionTests(unittest.TestCase):

    def compare_arrays(self, a, b):
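        """assert that two arrays have the same shape and element-wise (almost) equal values"""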
        assert a.shape == b.shape
        for x, y in zip(a.flatten(),
                        b.flatten()):
            self.assertAlmostEqual(x, y)


    def test_cost(self):
        """test cost function"""

        w, b, X, Y = (np.array([[1],[2]]),
                      2,
                      np.array([[1,2],[3,4]]),
                      np.array([[1,0]]))

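        # the expected value is consistent with the standard cross-entropy cost,
        #   J = -(1/m) * sum(Y*log(A) + (1-Y)*log(1-A)) with A = sigmoid(w.T X + b),
        # evaluated on the sample w, b, X, Y above (assumed form of cost_function)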
        expected_cost = 6.000064773192205
        cost = logistic_regression.cost_function(w, b, X, Y)
        assert abs(cost - expected_cost) < 1e-6

    def test_propagate(self):
        """test canned logistic regression example"""

        # sample variables
        w = np.array([[1],[2]])
        b = 2
        X = np.array([[1,2],[3,4]])
        Y = np.array([[1,0]])

        # calculate gradient and cost
        grads, cost = logistic_regression.propagate(w, b, X, Y)

        # compare to expected values
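        # (these values are consistent with the standard logistic-regression gradients,
        #  dw = (1/m) * X @ (A - Y).T and db = mean(A - Y); assumed form of propagate)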
        dw_expected = np.array([[ 0.99993216], [ 1.99980262]])
        db_expected = 0.499935230625
        cost_expected = 6.000064773192205

        self.assertAlmostEqual(cost_expected, cost)
        self.assertAlmostEqual(grads['db'], db_expected)
        assert grads['dw'].shape == dw_expected.shape
        for a, b in zip(grads['dw'].flatten(),
                        dw_expected.flatten()):
            self.assertAlmostEqual(a, b)

    def test_optimize(self):
        """test gradient descent method"""

        # test examples
        w, b, X, Y = (np.array([[1],[2]]), 2, np.array([[1,2],[3,4]]), np.array([[1,0]]))

        params, grads, costs = logistic_regression.optimize(w, b, X, Y, num_iterations=100, learning_rate=0.009, print_cost=False)

        # expected output
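        # (values are consistent with 100 plain gradient-descent updates,
        #  w := w - learning_rate*dw and b := b - learning_rate*db; assumed behavior of optimize)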
        w_expected = np.array([[0.1124579 ],
                               [0.23106775]])
        dw_expected = np.array([[ 0.90158428],
                                [ 1.76250842]])
        b_expected = 1.55930492484
        db_expected = 0.430462071679

        # compare output
        self.assertAlmostEqual(params['b'], b_expected)
        self.assertAlmostEqual(grads['db'], db_expected)
        self.compare_arrays(w_expected, params['w'])
        self.compare_arrays(dw_expected, grads['dw'])

    def test_predict(self):
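        """test predictions on the canned example"""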

        w, b, X, Y = (np.array([[1],[2]]), 2, np.array([[1,2],[3,4]]), np.array([[1,0]]))

        predictions = logistic_regression.predict(w, b, X)

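        # z = w.T X + b = [[9, 12]]; sigmoid(z) > 0.5 for both columns, so both
        # examples should be labeled 1 (assuming a 0.5 decision threshold in predict)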
        assert predictions[0][0] == 1
        assert predictions[0][1] == 1

if __name__ == '__main__':
    unittest.main()