-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathneurons.py
168 lines (124 loc) · 4.55 KB
/
neurons.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
import nengo
import numpy as np
def softrelu(x, sigma=1.):
    """Soft rectified-linear function: ``sigma * log(1 + exp(x / sigma))``.

    A smooth approximation to ``max(x, 0)``; as ``sigma -> 0`` it approaches
    the hard rectifier.

    Parameters
    ----------
    x : array_like
        Input values.
    sigma : float
        Smoothing parameter (must be positive).

    Returns
    -------
    ndarray
        Float array the same shape as ``x``.
    """
    # Cast to float so the log1p/exp results are not truncated when the
    # caller passes an integer array (np.array(int_array) keeps int dtype).
    x = np.asarray(x, dtype=float)
    y = x / sigma
    # Start from a copy of x: for y >= 34, sigma*log1p(exp(y)) equals x to
    # within float precision, so we only evaluate exp where it cannot overflow.
    z = np.array(x)
    small = y < 34.0
    # ^ 34.0 gives exact answer in 32 or 64 bit but doesn't overflow in 32 bit
    z[small] = sigma * np.log1p(np.exp(y[small]))
    return z
def lif_j(j, tau_rc, tau_ref, amp):
    """LIF firing rate as a function of the shifted input current ``j``.

    Returns ``amp / (tau_ref + tau_rc * log(1 + 1/j))`` where ``j > 0``
    and zero elsewhere.
    """
    rates = np.zeros_like(j)
    pos = j > 0
    rates[pos] = amp / (tau_ref + tau_rc * np.log1p(1. / j[pos]))
    return rates
def lif(x, tau_rc, tau_ref, gain, bias, amp):
    """LIF rate response for input ``x``.

    Maps ``x`` to the shifted current ``j = gain*x + bias - 1`` and applies
    ``lif_j``. Zero wherever ``j <= 0``.
    """
    # Original computed j and then re-evaluated the same expression in the
    # call; use the local instead of duplicating the work.
    j = gain * x + bias - 1
    return lif_j(j, tau_rc, tau_ref, amp)
def d_lif(x, tau_rc, tau_ref, gain, bias, amp):
    """Derivative of the LIF rate with respect to ``x`` (zero for ``j <= 0``)."""
    current = gain * x + bias - 1
    rates = lif_j(current, tau_rc, tau_ref, amp)
    deriv = np.zeros_like(current)
    pos = current > 0
    r, j = rates[pos], current[pos]
    deriv[pos] = gain * tau_rc * r * r / (amp * j * (j + 1))
    return deriv
def softlif(x, sigma, tau_rc, tau_ref, gain, bias, amp):
    """Soft-LIF rate: LIF response with the hard rectification of the
    current replaced by ``softrelu`` (smoothing parameter ``sigma``)."""
    current = softrelu(gain * x + bias - 1, sigma=sigma)
    return lif_j(current, tau_rc, tau_ref, amp)
def d_softlif(x, sigma, tau_rc, tau_ref, gain, bias, amp):
    """Derivative of the soft-LIF rate with respect to ``x``.

    Chain rule: the LIF-rate derivative in the current, times the softrelu
    derivative ``1 / (1 + exp(-y/sigma))`` of the current in ``x``.
    """
    drive = gain * x + bias - 1
    current = softrelu(drive, sigma=sigma)
    rates = lif_j(current, tau_rc, tau_ref, amp)
    deriv = np.zeros_like(current)
    pos = current > 0
    r, j, y = rates[pos], current[pos], drive[pos]
    deriv[pos] = (gain * tau_rc * r * r) / (
        amp * j * (j + 1) * (1 + np.exp(-y / sigma)))
    return deriv
class SoftLIFRate(nengo.neurons.LIFRate):
    """LIF rate neuron with the hard current rectification smoothed by
    ``softrelu``, making the rate curve differentiable everywhere.

    Parameters
    ----------
    sigma : float
        Smoothing parameter for the rectification; as ``sigma -> 0`` this
        approaches the standard ``LIFRate`` response.
    **lif_args
        Remaining arguments forwarded to ``nengo.neurons.LIFRate``
        (e.g. ``tau_rc``, ``tau_ref``).
    """

    # Validated nengo parameter descriptor; sigma must be non-negative.
    sigma = nengo.params.NumberParam(low=0)

    def __init__(self, sigma=1., **lif_args):
        super(SoftLIFRate, self).__init__(**lif_args)
        self.sigma = sigma

    @property
    def _argreprs(self):
        # Extend the parent's repr arguments; only show sigma when it
        # differs from the default so default reprs stay compact.
        args = super(SoftLIFRate, self)._argreprs
        if self.sigma != 1.:
            args.append("sigma=%s" % self.sigma)
        return args

    def rates(self, x, gain, bias):
        """Return rates for input ``x`` under the given gain and bias.

        NOTE(review): does not apply any amplitude scaling -- callers
        (see get_numpy_fn) multiply by ``amp`` themselves.
        """
        J = gain * x + bias
        out = np.zeros_like(J)
        # Call via the class to bypass any subclass override of step_math.
        SoftLIFRate.step_math(self, dt=1, J=J, output=out)
        return out

    def step_math(self, dt, J, output):
        """Compute rates in Hz for input current (incl. bias)"""
        # Smoothly rectify the shifted current, then apply the LIF rate
        # equation where the rectified current is positive.
        j = softrelu(J - 1, sigma=self.sigma)
        output[:] = 0  # faster than output[j <= 0] = 0
        output[j > 0] = 1. / (
            self.tau_ref + self.tau_rc * np.log1p(1. / j[j > 0]))
def s_softrelu(x, sigma):
    """Theano-symbolic soft rectification; mirrors the numpy ``softrelu``."""
    import theano.tensor as tt
    scaled = x / sigma
    # Past 34 the soft and hard rectifiers agree to float precision.
    smooth = sigma * tt.log1p(tt.exp(scaled))
    return tt.switch(scaled < 34.0, smooth, x)
def s_lif(x, tau_ref, tau_rc, gain, bias, amp):
    """Theano-symbolic LIF rate; mirrors the numpy ``lif``."""
    import theano.tensor as tt
    current = gain * x + bias - 1
    # The rate expression is invalid for current <= 0, but switch masks
    # those elements with zero.
    rate = amp / (tau_ref + tau_rc * tt.log1p(1. / current))
    return tt.switch(current > 0, rate, 0.0)
def s_softlif(x, sigma, tau_ref, tau_rc, gain, bias, amp):
    """Theano-symbolic soft-LIF rate; mirrors the numpy ``softlif``."""
    import theano.tensor as tt
    current = s_softrelu(gain * x + bias - 1, sigma)
    rate = amp / (tau_ref + tau_rc * tt.log1p(1. / current))
    # softrelu output is strictly positive in exact math, but mask anyway
    # to match the hard-LIF handling of non-positive currents.
    return tt.switch(current > 0, rate, 0.0)
def get_numpy_fn(kind, params):
    """Return a numpy rate function ``f(x)`` for the given neuron kind.

    ``kind`` is ``'lif'`` or ``'softlif'``; ``params`` supplies ``tau_rc``,
    ``tau_ref``, ``gain``, ``bias``, ``amp`` (and ``sigma`` for softlif).
    Raises ValueError for any other kind.
    """
    if kind == 'lif':
        neuron = nengo.LIFRate(
            tau_rc=params['tau_rc'], tau_ref=params['tau_ref'])
    elif kind == 'softlif':
        neuron = SoftLIFRate(
            tau_rc=params['tau_rc'], tau_ref=params['tau_ref'],
            sigma=params['sigma'])
    else:
        raise ValueError("Unknown neuron type '%s'" % kind)

    def rate_fn(x):
        # rates() omits amplitude scaling, so apply amp here.
        return neuron.rates(x, params['gain'], params['bias']) * params['amp']

    return rate_fn
def get_numpy_deriv(kind, params):
    """Return a numpy function computing ``d(rate)/dx`` for the given kind.

    Raises ValueError when ``kind`` is neither 'lif' nor 'softlif'.
    """
    derivs = {'lif': d_lif, 'softlif': d_softlif}
    if kind not in derivs:
        raise ValueError("Unknown neuron type '%s'" % kind)
    deriv = derivs[kind]
    return lambda x: deriv(x, **params)
def get_theano_fn(kind, params):
    """Return a Theano-symbolic rate function for the given neuron kind.

    Every value in ``params`` is cast to Theano's configured float type
    before being bound into the symbolic expression.
    """
    import theano
    import theano.tensor as tt
    cast_params = {key: tt.cast(value, dtype=theano.config.floatX)
                   for key, value in params.items()}
    if kind == 'lif':
        return lambda x: s_lif(x, **cast_params)
    if kind == 'softlif':
        return lambda x: s_softlif(x, **cast_params)
    raise ValueError("Unknown neuron type '%s'" % kind)
def test_theano():
    """Smoke test: compile the LIF and soft-LIF Theano graphs and plot
    both rate curves over x in [-1, 1]."""
    import theano
    import theano.tensor as tt
    import matplotlib.pyplot as plt

    # amp = 1/63.04 normalizes the max rate of this parameterization to ~1.
    lif_params = dict(tau_rc=0.02, tau_ref=0.002, gain=1, bias=1,
                      amp=1. / 63.04)
    softlif_params = dict(lif_params, sigma=0.01)

    x = np.linspace(-1, 1)

    sx = tt.vector()
    lif_fn = theano.function([sx], get_theano_fn('lif', lif_params)(sx))
    sy = tt.vector()
    softlif_fn = theano.function(
        [sy], get_theano_fn('softlif', softlif_params)(sy))

    plt.figure()
    plt.plot(x, lif_fn(x))
    plt.plot(x, softlif_fn(x))
    plt.show()
if __name__ == '__main__':
    # Run the Theano plotting smoke test when executed as a script.
    test_theano()