#!/usr/bin/env python

"""
test logistic regression
"""

import numpy as np
import unittest
from tvii import logistic_regression
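

# A minimal reference sketch of the formulas the tests below appear to assume
# (standard logistic regression with a sigmoid activation and a cross-entropy
# cost). This is an illustration only; it is not tvii's implementation and is
# not called by the tests.
def _reference_propagate(w, b, X, Y):
    """sketch: activation, cost, and gradients for logistic regression"""
    m = X.shape[1]  # number of examples (one per column of X)
    A = 1. / (1. + np.exp(-(np.dot(w.T, X) + b)))  # sigmoid activation
    cost = -np.sum(Y * np.log(A) + (1 - Y) * np.log(1 - A)) / m
    dw = np.dot(X, (A - Y).T) / m  # gradient of the cost with respect to w
    db = np.sum(A - Y) / m         # gradient of the cost with respect to b
    return {'dw': dw, 'db': db}, cost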


class LogisticRegressionTests(unittest.TestCase):

    def compare_arrays(self, a, b):
        """assert that two arrays have the same shape and are elementwise almost equal"""
        self.assertEqual(a.shape, b.shape)
        for x, y in zip(a.flatten(),
                        b.flatten()):
            self.assertAlmostEqual(x, y)


    def test_cost(self):
        """test cost function"""

        w, b, X, Y = (np.array([[1],[2]]),
                      2,
                      np.array([[1,2],[3,4]]),
                      np.array([[1,0]]))

        expected_cost = 6.000064773192205
        cost = logistic_regression.cost_function(w, b, X, Y)
        self.assertAlmostEqual(cost, expected_cost, delta=1e-6)
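
    # Where the expected cost comes from, assuming the cross-entropy cost
    # sketched in _reference_propagate above:
    #   z = w.T X + b = [9, 12]
    #   A = sigmoid(z) ~ [0.99987661, 0.99999386]
    #   J = -1/2 * (log(0.99987661) + log(1 - 0.99999386)) ~ 6.000065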

    def test_propagate(self):
        """test canned logistic regression example"""

        # sample variables
        w = np.array([[1],[2]])
        b = 2
        X = np.array([[1,2],[3,4]])
        Y = np.array([[1,0]])

        # calculate gradient and cost
        grads, cost = logistic_regression.propagate(w, b, X, Y)

        # compare to expected values
        dw_expected = np.array([[ 0.99993216], [ 1.99980262]])
        db_expected = 0.499935230625
        cost_expected = 6.000064773192205

        self.assertAlmostEqual(cost_expected, cost)
        self.assertAlmostEqual(grads['db'], db_expected)
        self.compare_arrays(dw_expected, grads['dw'])
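
    # The expected gradients are consistent with the usual logistic regression
    # gradients (an assumption about what propagate computes, not a statement
    # of its implementation):
    #   dw = (1/m) * X (A - Y).T ~ [[0.99993216], [1.99980262]]
    #   db = (1/m) * sum(A - Y)  ~ 0.4999352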

    def test_optimize(self):
        """test gradient descent method"""

        # test examples
        w, b, X, Y = (np.array([[1],[2]]), 2, np.array([[1,2],[3,4]]), np.array([[1,0]]))

        params, grads, costs = logistic_regression.optimize(
            w, b, X, Y, num_iterations=100, learning_rate=0.009, print_cost=False)

        # expected output
        w_expected = np.array([[0.1124579 ],
                               [0.23106775]])
        dw_expected = np.array([[ 0.90158428],
                                [ 1.76250842]])
        b_expected = 1.55930492484
        db_expected = 0.430462071679

        # compare output
        self.assertAlmostEqual(params['b'], b_expected)
        self.assertAlmostEqual(grads['db'], db_expected)
        self.compare_arrays(w_expected, params['w'])
        self.compare_arrays(dw_expected, grads['dw'])
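
    # The expected parameters are consistent with plain batch gradient descent,
    # assuming the update rule
    #   w := w - learning_rate * dw
    #   b := b - learning_rate * db
    # applied for num_iterations=100 with learning_rate=0.009.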

    def test_predict(self):
        """test binary predictions from given parameters"""

        w, b, X = (np.array([[1],[2]]), 2, np.array([[1,2],[3,4]]))

        predictions = logistic_regression.predict(w, b, X)

        assert predictions[0][0] == 1
        assert predictions[0][1] == 1
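
    # Predictions are presumably thresholded at 0.5 on the sigmoid activation:
    #   y_hat = 1 if sigmoid(w.T x + b) > 0.5 else 0
    # Here z = w.T X + b = [9, 12], so both activations are ~1 and both
    # predicted labels are 1.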

if __name__ == '__main__':
    unittest.main()