# 梯度替代

$\begin{split}\Theta(x) = \begin{cases} 1, & x \geq 0 \\ 0, & x < 0 \end{cases}\end{split}$

$\begin{split}\delta(x) = \begin{cases} +\infty, & x = 0 \\ 0, & x \neq 0 \end{cases}\end{split}$

# spikingjelly.activation_based.neuron (excerpt; "..." marks code omitted from the docs)
class BaseNode(base.MemoryModule):
    def __init__(self, *args, surrogate_function: Callable = surrogate.Sigmoid(), **kwargs):
        # ...
        # Surrogate function used in place of the Heaviside step's (zero/undefined)
        # gradient during the backward pass.
        self.surrogate_function = surrogate_function
        # ...

    def neuronal_fire(self):
        # Fire a spike when the membrane potential v crosses v_threshold:
        # Heaviside in the forward pass, surrogate gradient in the backward pass.
        return self.surrogate_function(self.v - self.v_threshold)


spikingjelly.activation_based.surrogate 中提供了一些常用的替代函数。其中 Sigmoid 函数 $$\sigma(x, \alpha) = \frac{1}{1 + \exp(-\alpha x)}$$ 对应 spikingjelly.activation_based.surrogate.Sigmoid。下图展示了原始的 Heaviside 阶跃函数（Heaviside）、alpha=5 时的 Sigmoid 原函数（Primitive）以及其梯度（Gradient）。

import torch
from spikingjelly.activation_based import surrogate

# Sigmoid surrogate: Heaviside forward, sigmoid-shaped gradient backward.
sg = surrogate.Sigmoid(alpha=4.)

x = torch.rand([8]) - 0.5
# Without requires_grad the graph has no leaf to differentiate and
# backward() raises RuntimeError; enable it before the forward pass.
x.requires_grad_(True)
y = sg(x)
y.sum().backward()
print(f'x={x}')
print(f'y={y}')
# Print the surrogate gradient, as shown in the sample output.
print(f'x.grad={x.grad}')


x=tensor([-0.1303,  0.4976,  0.3364,  0.4296,  0.2779,  0.4580,  0.4447,  0.2466], requires_grad=True)
y=tensor([0., 1., 1., 1., 1., 1., 1., 1.], grad_fn=<sigmoidBackward>)
x.grad=tensor([0.9351, 0.4231, 0.6557, 0.5158, 0.7451, 0.4759, 0.4943, 0.7913])


Sigmoid

sigmoid

SoftSign

soft_sign

LeakyKReLU

leaky_k_relu

import torch
from spikingjelly.activation_based import surrogate

alpha = 4.
x = torch.rand([8]) - 0.5