Package rdkit :: Package ML :: Package Neural :: Module ActFuncs
[hide private]
[frames] | [no frames]

Source Code for Module rdkit.ML.Neural.ActFuncs

 1  # 
 2  #  Copyright (C) 2000-2008  greg Landrum 
 3  # 
 4  """ Activation functions for neural network nodes 
 5   
 6  Activation functions should implement the following API: 
 7   
 8   - _Eval(x)_: returns the value of the function at a given point 
 9   
10   - _Deriv(x)_: returns the derivative of the function at a given point 
11   
12  The current Backprop implementation also requires: 
13   
14   - _DerivFromVal(val)_: returns the derivative of the function when its 
15                          value is val 
16   
17  In all cases _x_ is a float as is the value returned. 
18   
19  """ 
20  import math 
21   
22   
class ActFunc(object):
  """Abstract base for activation functions.

  Subclasses supply Eval (and Deriv/DerivFromVal); this base only makes
  instances directly callable, so an activation object can be used like
  a plain function.
  """

  def __call__(self, x):
    # Delegate to the subclass-provided Eval so the instance acts as f(x).
    result = self.Eval(x)
    return result
class Sigmoid(ActFunc):
  """The standard logistic sigmoid: f(x) = 1 / (1 + exp(-beta * x)).

  Arguments:
    - beta: (float) steepness of the transition (default: 1.)
  """

  def __init__(self, beta=1.):
    # beta scales the input; larger beta -> steeper transition at x = 0.
    self.beta = beta

  def Eval(self, x):
    """Returns the value of the sigmoid at the point x.

    Evaluated in a numerically stable form: the naive
    1./(1. + math.exp(-beta*x)) raises OverflowError in math.exp for
    large negative beta*x, so we branch on the sign of the argument and
    only ever exponentiate a non-positive value.
    """
    z = self.beta * x
    if z >= 0:
      return 1. / (1. + math.exp(-z))
    ez = math.exp(z)  # z < 0, so exp(z) < 1 and cannot overflow
    return ez / (1. + ez)

  def Deriv(self, x):
    """Returns the derivative of the sigmoid at x: beta * f(x) * (1 - f(x))."""
    val = self.Eval(x)
    return self.beta * val * (1. - val)

  def DerivFromVal(self, val):
    """Returns the derivative given the function's value val (avoids re-evaluating)."""
    return self.beta * val * (1. - val)
class TanH(ActFunc):
  """The standard hyperbolic tangent function: f(x) = tanh(beta * x).

  Arguments:
    - beta: (float) steepness of the transition (default: 1.)
  """

  def __init__(self, beta=1.):
    # beta scales the input; larger beta -> steeper transition at x = 0.
    self.beta = beta

  def Eval(self, x):
    """Returns the value of tanh(beta * x).

    Uses math.tanh rather than forming (e^z - e^-z) / (e^z + e^-z)
    explicitly: the explicit form raises OverflowError in math.exp for
    |beta*x| beyond roughly 710, whereas math.tanh saturates cleanly
    to +/-1.
    """
    return math.tanh(self.beta * x)

  def Deriv(self, x):
    """Returns the derivative of tanh(beta*x) at x: beta * (1 - f(x)^2)."""
    val = self.Eval(x)
    return self.beta * (1 - val * val)

  def DerivFromVal(self, val):
    """Returns the derivative given the function's value val (avoids re-evaluating)."""
    return self.beta * (1 - val * val)