Source code for pymlp.mlp.TransferFunctions

import numpy as np

class Logistic:
    """The logistic function and its derivative."""

    def __init__(self, beta=10):
        self.beta = beta

    def f(self, activations):
        """The activation function: 1 / (1 + exp(-beta * activations))."""
        return 1.0 / (1.0 + np.exp(-self.beta * activations))

    def df(self, activations):
        """Derivative of f, written in terms of the outputs of f
        (Rojas, 1996). The factor beta comes from the chain rule."""
        return self.beta * activations * (1 - activations)
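
A quick way to see that df is meant to receive the outputs of f (not the raw net inputs) is to compare it against a finite-difference estimate of the slope. The snippet below is an illustrative check, not part of the module; the sample points and tolerance are arbitrary.

import numpy as np

# Illustrative check (not part of pymlp): df applied to f's outputs should
# match a central finite-difference estimate of d f / d activations.
logistic = Logistic(beta=10)
x = np.linspace(-1.0, 1.0, 5)
y = logistic.f(x)

analytic = logistic.df(y)                    # df takes the outputs of f
eps = 1e-6
numeric = (logistic.f(x + eps) - logistic.f(x - eps)) / (2 * eps)

print(np.allclose(analytic, numeric, atol=1e-6))   # expected: True
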
class Linear:
    """Linear function and its derivative."""

    def f(self, activations):
        """The identity: returns the activations unchanged."""
        return activations

    def df(self, activations):
        """Derivative of the identity is 1 everywhere."""
        return np.ones(activations.shape)
class TanH:
    """The TanH function and its derivative."""

    def f(self, activations):
        """The hyperbolic tangent activation function."""
        # A piecewise polynomial approximation of tanh, kept for reference:
        # result = np.where(activations > 1.92032, 0.96016, activations)
        # result = np.where((activations < 1.92032) * (activations > 0.0),
        #                   0.260371 * result ** 2 + result, result)
        # result = np.where((activations > -1.92032) * (activations <= 0.0),
        #                   0.260373271 * result ** 2 + result, result)
        # result = np.where(activations < -1.92032, -0.96016, result)
        return np.tanh(activations)

    def df(self, activations):
        """Derivative of tanh, written in terms of the outputs of f:
        d/dx tanh(x) = 1 - tanh(x) ** 2."""
        return 1 - activations ** 2
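
For context, a minimal sketch of how these classes are typically used in an MLP: f in the forward pass, df on the layer outputs during backpropagation. Everything except the transfer-function class (the layer sizes, weights, error signal, and learning rate) is made up for illustration; the import path is the one in the page title.

import numpy as np
from pymlp.mlp.TransferFunctions import TanH

rng = np.random.default_rng(0)
transfer = TanH()

inputs = rng.normal(size=(4, 3))        # 4 samples, 3 input features
weights = rng.normal(size=(3, 2))       # one hypothetical weight layer

net = inputs @ weights                  # pre-activations
outputs = transfer.f(net)               # forward pass

targets = rng.normal(size=outputs.shape)
delta = (outputs - targets) * transfer.df(outputs)   # df expects the outputs

grad = inputs.T @ delta                 # gradient w.r.t. the weights
weights -= 0.01 * grad                  # one plain gradient-descent step
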