add Bouhadjar sequence learning network tutorial

C.A.P. Linssen committed May 9, 2024
1 parent 7eb99b5 commit 233334a
Showing 7 changed files with 3,337 additions and 2,813 deletions.
165 changes: 165 additions & 0 deletions doc/tutorials/sequences/iaf_psc_exp_nonlineardendrite_neuron.nestml
@@ -0,0 +1,165 @@
model iaf_psc_exp_nonlineardendrite_neuron:

    state:
        V_m mV = 0 mV                    # membrane potential
        dAP_trace pA = 0 pA              # dAP trace
        active_dendrite boolean = false
        active_dendrite_readout real = 0.
        dAP_counts integer = 0
        ref_counts integer = 0
        I_dend pA = 0 pA
        I_dend$ pA/ms = 0 pA/ms

    equations:
        # exponentially shaped postsynaptic current kernel
        kernel I_kernel1 = exp(-1/tau_syn1*t)

        # alpha-shaped postsynaptic current kernel, expressed as a pair of
        # coupled first-order ODEs equivalent to:
        #kernel I_kernel2 = (e/tau_syn2) * t * exp(-t/tau_syn2)
        I_dend' = I_dend$ - I_dend / tau_syn2
        I_dend$' = -I_dend$ / tau_syn2

        # exponentially shaped postsynaptic current kernel
        kernel I_kernel3 = exp(-1/tau_syn3*t)

        # differential equation for the membrane potential
        #recordable inline I_dend pA = convolve(I_kernel2, I_2) * pA
        inline I_syn pA = convolve(I_kernel1, I_1) * pA - convolve(I_kernel3, I_3) * pA + I_e
        V_m' = -(V_m - E_L)/tau_m + (I_syn + I_dend) / C_m

        # differential equation for the dAP trace
        dAP_trace' = -dAP_trace / tau_h

    parameters:
        C_m pF = 250 pF        # membrane capacitance
        tau_m ms = 20 ms       # membrane time constant
        tau_syn1 ms = 10 ms    # time constant of synaptic current, port 1
        tau_syn2 ms = 10 ms    # time constant of synaptic current, port 2
        tau_syn3 ms = 10 ms    # time constant of synaptic current, port 3
        tau_h ms = 400 ms      # time constant of the dAP trace
        V_th mV = 25 mV        # spike threshold
        V_reset mV = 0 mV      # reset voltage
        I_e pA = 0 pA          # external current
        E_L mV = 0 mV          # resting potential

        # dendritic action potential
        theta_dAP pA = 60 pA   # current threshold for a dendritic action potential
        I_p pA = 250 pA        # current clamp value for I_dend during a dendritic action potential
        tau_dAP ms = 60 ms     # time window over which the dendritic current clamp is active
        dAP_timeout_ticks integer = steps(tau_dAP)

        # refractory parameters
        t_ref ms = 10 ms       # refractory period
        ref_timeout_ticks integer = steps(t_ref)

        I_dend_incr pA/ms = pA * exp(1) / tau_syn2

    input:
        I_1 <- spike
        I_2 <- spike
        I_3 <- spike

    output:
        spike

    onReceive(I_2):
        I_dend$ += I_2 * ms * I_dend_incr * 1E6    # XXX factor 1E6?!

    update:
        # solve the ODEs
        integrate_odes()

        # current threshold exceeded: start (or continue) a dendritic action potential
        if I_dend > theta_dAP or active_dendrite:
            if dAP_counts == 0:
                if active_dendrite == false:
                    # starting dAP
                    dAP_trace += 1 pA
                    active_dendrite = true
                    active_dendrite_readout = 1.
                    I_dend = I_p
                    dAP_counts = dAP_timeout_ticks
                else:
                    # ending dAP
                    I_dend = 0 pA
                    active_dendrite = false
                    active_dendrite_readout = 0.

                    # the following assignment to I_dend$ reproduces a bug in the original implementation
                    c1 real = -resolution() * exp(-resolution() / tau_syn2) / tau_syn2**2
                    c2 real = (-resolution() + tau_syn2) * exp(-resolution() / tau_syn2) / tau_syn2
                    I_dend$ = I_p * c1 / (1 - c2) / ms
            else:
                dAP_counts -= 1
                I_dend = I_p

        # threshold crossing and refractoriness
        if ref_counts == 0:
            if V_m > V_th:
                emit_spike()
                ref_counts = ref_timeout_ticks
                V_m = V_reset
                dAP_counts = 0
                I_dend = 0 pA
                active_dendrite = false
                active_dendrite_readout = 0.
        else:
            ref_counts -= 1
            V_m = V_reset
            active_dendrite = false
            active_dendrite_readout = 0.
            dAP_counts = 0
            I_dend = 0 pA
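For readers reproducing the tutorial: this model file is compiled into a NEST extension module by the PyNESTML code generator. A minimal sketch, with placeholder paths and module name (the accompanying notebook may pass additional code-generator options):

from pynestml.frontend.pynestml_frontend import generate_nest_target

# generate, build and install C++ code for the model above (paths are placeholders)
generate_nest_target(input_path="doc/tutorials/sequences/iaf_psc_exp_nonlineardendrite_neuron.nestml",
                     target_path="/tmp/nestml-target",
                     module_name="nestmlmodule",
                     suffix="_nestml")

After nest.Install("nestmlmodule"), the model is available in NEST under the name iaf_psc_exp_nonlineardendrite_neuron_nestml.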

5,901 changes: 3,121 additions & 2,780 deletions doc/tutorials/sequences/sequences.ipynb

Large diffs are not rendered by default.

@@ -577,7 +577,7 @@ void {{neuronName}}::pre_run_hook()
{%- endif %}
{
#ifdef DEBUG
std::cout << "{{neuronName}}::pre_run_hook()" << std::endl;
std::cout << "[neuron " << this << "] {{neuronName}}::pre_run_hook()" << std::endl;
#endif
B_.logger_.init();

@@ -633,7 +633,7 @@ void {{neuronName}}::pre_run_hook()
void {{neuronName}}::update_delay_variables()
{
#ifdef DEBUG
std::cout << "{{neuronName}}::update_delay_variables()" << std::endl;
std::cout << "[neuron " << this << "] {{neuronName}}::update_delay_variables()" << std::endl;
#endif
{%- for variable_symbol in neuron.get_state_symbols() %}
{%- if variable_symbol.has_delay_parameter() %}
@@ -657,7 +657,7 @@ double {{neuronName}}::get_delayed_{{variable.get_symbol_name()}}() const
{%- if use_gap_junctions %}
bool {{neuronName}}::update_( nest::Time const& origin, const long from, const long to, const bool called_from_wfr_update )
{%- else %}
- void {{neuronName}}::update(nest::Time const & origin,const long from, const long to)
+ void {{neuronName}}::update(nest::Time const & origin, const long from, const long to)
{%- endif %}
{
const double __resolution = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the resolution() function
@@ -697,7 +697,7 @@ const double {{ variable_name }}__tmp = {{ printer.print(var_ast) }};
auto get_t = [origin, lag](){ return nest::Time( nest::Time::step( origin.get_steps() + lag + 1) ).get_ms(); };

#ifdef DEBUG
std::cout << "In {{neuronName}}::update{% if use_gap_junctions %}_{% endif %}: t = " << get_t() << std::endl;
std::cout << "[neuron " << this << "] In {{neuronName}}::update{% if use_gap_junctions %}_{% endif %}: t = " << get_t() << std::endl;
#endif

{%- if use_gap_junctions %}
@@ -980,7 +980,10 @@ const double {{ variable_name }}__tmp = {{ printer.print(var_ast) }};
{%- endif %}

{%- if state_vars_that_need_continuous_buffering | length > 0 %}
- write_continuous_variable_history( nest::Time::step( origin.get_steps() + lag + 1 ),
+ #ifdef DEBUG
+ std::cout << "[neuron " << this << "] Writing history at time " << nest::Time(nest::Time::step( origin.get_steps() + lag + 1 )).get_ms() << "\n";
+ #endif
+ write_continuous_variable_history( nest::Time(nest::Time::step( origin.get_steps() + lag + 1 )),
{%- for state_var_name in state_vars_that_need_continuous_buffering_transformed %}
{%- set state_var = utils.get_variable_by_name(astnode, state_var_name) %}
{{ printer.print(state_var) }}{% if not loop.last %}, {% endif %}
@@ -1041,7 +1044,23 @@ continuous_variable_histentry_{{ neuronName }} {{ neuronName }}::get_continuous_
}

// if we get here, there is no entry at time t
std::cout << "\n\n\nXXX FIX ME: no entry at time t!\n\n\n";
std::cout << "[neuron " << this << "] XXX FIX ME: no entry at time t = " << t << "! Size = " << continuous_variable_history_.size() << ", first t = " << continuous_variable_history_.begin()->t_ << ", last t = " << std::prev(continuous_variable_history_.end())->t_ << "\n";

size_t i = 0;
runner = continuous_variable_history_.begin();
while ( runner != continuous_variable_history_.end() )
{
std::cout << "[neuron " << this << "] \t" << i << "\t" << runner->t_ << "\n";
++runner;
++i;
}







return continuous_variable_histentry_{{ neuronName }}(0.,
{%- for state_var in state_vars_that_need_continuous_buffering %}
0.{% if not loop.last %}, {% endif %}
@@ -1054,7 +1073,7 @@ void {{neuronName}}::get_continuous_variable_history( double t1,
std::deque< continuous_variable_histentry_{{ neuronName }} >::iterator* finish )
{
#ifdef DEBUG
std::cout << "{{neuronName}}::get_continuous_variable_history()" << std::endl;
std::cout << "[neuron " << this << "] {{neuronName}}::get_continuous_variable_history()" << std::endl;
#endif
*finish = continuous_variable_history_.end();
if ( continuous_variable_history_.empty() )
@@ -1089,14 +1108,15 @@ void {{neuronName}}::write_continuous_variable_history(nest::Time const &t,
{%- endfor %})
{
#ifdef DEBUG
std::cout << "{{neuronName}}::write_continuous_variable_history()" << std::endl;
std::cout << "[neuron " << this << "] {{neuronName}}::write_continuous_variable_history()" << std::endl;
#endif
const double t_ms = t.get_ms();

// prune all entries from history which are no longer needed except the penultimate one. we might still need it.
+ auto orig = continuous_variable_history_.size();
while ( continuous_variable_history_.size() > 1 )
{
-   if ( continuous_variable_history_.front().access_counter_ >= n_incoming_ )
+   if ( continuous_variable_history_.front().access_counter_ >= 10 * n_incoming_ )
{
continuous_variable_history_.pop_front();
}
@@ -1106,12 +1126,16 @@
}
}

+ if (continuous_variable_history_.size() != orig) {
+   std::cout << "[neuron " << this << "] \tpruning history, original size = " << orig << ", new size = " << continuous_variable_history_.size() << "\n";
+ }

continuous_variable_history_.push_back( continuous_variable_histentry_{{ neuronName }}( t_ms,
{%- for state_var in state_vars_that_need_continuous_buffering %}
{{ state_var }}{% if not loop.last %}, {% endif %}
{%- endfor %}) );
#ifdef DEBUG
std::cout << "\thistory size = " << continuous_variable_history_.size() << std::endl;
std::cout << "[neuron " << this << "] \thistory size = " << continuous_variable_history_.size() << std::endl;
#endif
}
{%- endif %}
@@ -1127,7 +1151,7 @@ void {{neuronName}}::handle(nest::DataLoggingRequest& e)
void {{neuronName}}::handle(nest::SpikeEvent &e)
{
#ifdef DEBUG
std::cout << "{{neuronName}}::handle()" << std::endl;
std::cout << "[neuron " << this << "] {{neuronName}}::handle()" << std::endl;
#endif
assert(e.get_delay_steps() > 0);
assert( e.get_rport() < B_.spike_inputs_.size() );
@@ -1163,7 +1187,7 @@ void {{neuronName}}::handle(nest::SpikeEvent &e)
void {{neuronName}}::handle(nest::CurrentEvent& e)
{
#ifdef DEBUG
std::cout << "{{neuronName}}::handle(CurrentEvent)" << std::endl;
std::cout << "[neuron " << this << "] {{neuronName}}::handle(CurrentEvent)" << std::endl;
#endif
assert(e.get_delay_steps() > 0);

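The changes to the neuron template above instrument the continuous-variable history buffer: the generated neuron stores per-timestep samples of selected state variables (those needed by the paired synapse, such as the dAP trace) so they can be looked up at postsynaptic spike times. The same variables can be observed from Python with a standard NEST multimeter — a sketch, assuming the module name and suffix from the earlier sketch:

import nest

nest.Install("nestmlmodule")   # module name as assumed above
nrn = nest.Create("iaf_psc_exp_nonlineardendrite_neuron_nestml")
mm = nest.Create("multimeter", params={"record_from": ["V_m", "I_dend", "active_dendrite_readout"],
                                       "interval": 0.1})
nest.Connect(mm, nrn)
nest.Simulate(100.)
events = mm.get("events")      # sampled state variables with time stamps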
@@ -725,11 +725,13 @@ public:
&finish );
while ( start != finish )
{
std::cout << "\tprocessing post spike at t = " << start->t_ + __dendritic_delay << std::endl;

{%- if paired_neuron_name is not none and paired_neuron_name|length > 0 and paired_neuron.state_vars_that_need_continuous_buffering | length > 0 %}
/**
* grab state variables from the postsynaptic neuron at the time of the post spike
**/

std::cout << "Grabbing continuous_variable_history at t = " << start->t_ + __dendritic_delay << "\n";
auto histentry = ((post_neuron_t*)(__target))->get_continuous_variable_history(start->t_ + __dendritic_delay);

{%- for var_name in paired_neuron.state_vars_that_need_continuous_buffering %}
@@ -754,9 +756,6 @@
break; // this would in any case have been the last post spike to be processed
}

- #ifdef DEBUG
- std::cout << "\tprocessing post spike at t = " << start->t_ + __dendritic_delay << std::endl;
- #endif

/**
* update synapse internal state from `t_lastspike_` to `start->t_`
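The synapse template changes above are where the buffered postsynaptic state is consumed: each processed post-spike triggers a lookup in the neuron's continuous-variable history at the spike time plus dendritic delay. Pairing a neuron with a synapse so that this lookup code is generated is controlled through the neuron_synapse_pairs code-generator option — a sketch using hypothetical file and model names for the plastic synapse (the tutorial's synapse model may be named differently):

from pynestml.frontend.pynestml_frontend import generate_nest_target

# "stdsp_synapse" is a hypothetical name for the tutorial's plastic synapse model
generate_nest_target(input_path=["iaf_psc_exp_nonlineardendrite_neuron.nestml", "stdsp_synapse.nestml"],
                     target_path="/tmp/nestml-target",
                     module_name="nestmlmodule",
                     codegen_opts={"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_nonlineardendrite_neuron",
                                                             "synapse": "stdsp_synapse",
                                                             "post_ports": ["post_spikes"]}]})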
@@ -7,8 +7,12 @@
/**
* generated code for emit_spike() function
**/

{% if ast.get_args() | length == 0 %}
{#- no parameters -- emit_spike() called from within neuron #}
+ //#ifdef DEBUG
+ std::cout << "Emitting a spike at t = " << nest::Time(nest::Time::step(origin.get_steps() + lag + 1)).get_ms() << "\n";
+ //#endif
set_spiketime(nest::Time::step(origin.get_steps() + lag + 1));
nest::SpikeEvent se;
nest::kernel().event_delivery_manager.send(*this, se, lag);
@@ -24,5 +28,3 @@ e.set_delay_steps( get_delay_steps() );
e.set_rport( get_rport() );
e();
{%- endif %}


17 changes: 5 additions & 12 deletions pynestml/meta_model/ast_node.py
@@ -74,25 +74,18 @@ def clone(self):
pass

@abstractmethod
-    def equals(self, other):
+    def equals(self, other) -> bool:
"""
The equals operation.
-        :param other: a different object.
-        :type other: object
+        :param other: a different AST node.
        :return: True if equal, otherwise False.
-        :rtype: bool
"""
pass

-    # todo: we can do this with a visitor instead of hard coding grammar traversals all over the place
@abstractmethod
-    def get_parent(self, ast):
+    def get_parent(self):
"""
-        Indicates whether this node contains the handed over node.
-        :param ast: an arbitrary meta_model node.
-        :type ast: AST_
-        :return: AST if this or one of the child nodes contains the handed over element.
-        :rtype: AST_ or None
+        Get the parent of this node.
+        :return: The parent node
"""
pass

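The get_parent refactor above replaces a root-down search (pass any node to the root's get_parent and search the subtree) with a parent reference stored on each node. Call sites change accordingly — a sketch of walking up to the enclosing model under the new API (some_node stands for any attached AST node):

from pynestml.meta_model.ast_model import ASTModel

# old API: parent = root_node.get_parent(some_node)   -- downward search from the root
# new API: every node knows its own parent
node = some_node.get_parent()
while node is not None and not isinstance(node, ASTModel):
    node = node.get_parent()
enclosing_model = node   # None if some_node is not attached to a model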
6 changes: 3 additions & 3 deletions pynestml/visitors/ast_visitor.py
@@ -45,6 +45,7 @@
from pynestml.meta_model.ast_nestml_compilation_unit import ASTNestMLCompilationUnit
from pynestml.meta_model.ast_model import ASTModel
from pynestml.meta_model.ast_model_body import ASTModelBody
+ from pynestml.meta_model.ast_node import ASTNode
from pynestml.meta_model.ast_ode_equation import ASTOdeEquation
from pynestml.meta_model.ast_inline_expression import ASTInlineExpression
from pynestml.meta_model.ast_on_condition_block import ASTOnConditionBlock
@@ -730,11 +731,10 @@ def handle(self, _node):
self.get_real_self().traverse(_node)
self.get_real_self().endvisit(_node)

-    def visit(self, node):
+    def visit(self, node: ASTNode):
"""
Dispatcher for visitor pattern.
-        :param node: The ASTElement to visit
-        :type node: ASTElement or inherited
+        :param node: The ASTNode to visit
"""
if isinstance(node, ASTArithmeticOperator):
self.visit_arithmetic_operator(node)
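With visit() now typed against ASTNode, concrete visitors keep their usual shape: subclass ASTVisitor, override the visit_* hooks of interest, and pass the visitor to a node's accept(). A minimal sketch (hook names follow the dispatcher shown above; visit_declaration is assumed to match declaration nodes):

from pynestml.visitors.ast_visitor import ASTVisitor

class DeclarationCounter(ASTVisitor):
    # counts the declarations encountered during traversal
    def __init__(self):
        super().__init__()
        self.count = 0

    def visit_declaration(self, node):
        self.count += 1

counter = DeclarationCounter()
model.accept(counter)   # model: any parsed ASTModel
print(counter.count)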
