view tvii/activation.py @ 43:2f0caec46e26

[activation functions] add ReLU
author Jeff Hammel <k0scist@gmail.com>
date Mon, 04 Sep 2017 14:53:32 -0700
parents 8c1648921827
children 857a606783e1

"""
activation functions
"""

# tanh:
# g(z) = tanh(z) = (exp(z) - exp(-z))/(exp(z) + exp(-z))
# g'(z) = 1 - (tanh(z))**2
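
# A minimal sketch of the tanh pair described in the comments above; it
# delegates to the standard-library math.tanh, and the names tanh/tanhprime
# are assumptions chosen to mirror ReLU/ReLUprime below.
import math

def tanh(z):
    # g(z) = (exp(z) - exp(-z))/(exp(z) + exp(-z))
    return math.tanh(z)

def tanhprime(z):
    # g'(z) = 1 - (tanh(z))**2
    return 1. - math.tanh(z)**2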

# ReLU
# g(z) = max(0, z)
def ReLU(z):
    return max(0, z)

# g'(z) = 1 for z > 0, else 0 (the derivative is undefined at z == 0;
# by convention the subgradient 0 is used there)
def ReLUprime(z):
    return 1. if z > 0 else 0.
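
# Example usage (values assumed for illustration):
#   >>> ReLU(-2.5), ReLU(3.0)
#   (0, 3.0)
#   >>> ReLUprime(-2.5), ReLUprime(3.0)
#   (0.0, 1.0)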