Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Ensure compatibility with NESTML custom as well as NEST built-in synaptic plasticity models #997

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
3 changes: 2 additions & 1 deletion pynestml/codegeneration/nest_code_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ class NESTCodeGenerator(CodeGenerator):

Options:

- **neuron_parent_class**: The C++ class from which the generated NESTML neuron class inherits. Examples: ``"ArchivingNode"``, ``"StructuralPlasticityNode"``. Default: ``"ArchivingNode"``.
- **neuron_parent_class**: The C++ class from which the generated NESTML neuron class inherits. Examples: ``"ArchivingNode"``, ``"StructuralPlasticityNode"``. To generate a model that has the smallest memory footprint, use ``"StructuralPlasticityNode"``. To ensure compatibility with the NEST built-in plastic synapses (like the ``stdp_synapse``), choose ``"ArchivingNode"``. Default: ``"ArchivingNode"``.
- **neuron_parent_class_include**: The C++ header filename to include that contains **neuron_parent_class**. Default: ``"archiving_node.h"``.
- **neuron_synapse_pairs**: List of pairs of (neuron, synapse) model names.
- **neuron_models**: List of neuron model names. Instructs the code generator that models with these names are neuron models.
Expand Down Expand Up @@ -169,6 +169,7 @@ def __init__(self, options: Optional[Mapping[str, Any]] = None):
def run_nest_target_specific_cocos(self, neurons: Sequence[ASTModel], synapses: Sequence[ASTModel]):
for synapse in synapses:
synapse_name_stripped = removesuffix(removesuffix(synapse.name.split("_with_")[0], "_"), FrontendConfiguration.suffix)
assert synapse_name_stripped in self.get_option("delay_variable").keys(), "Please specify a ``delay_variable`` for the synapse '" + synapse_name_stripped + "'"
delay_variable = self.get_option("delay_variable")[synapse_name_stripped]
CoCoNESTSynapseDelayNotAssignedTo.check_co_co(delay_variable, synapse)
if Logger.has_errors(synapse):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ std::vector< std::tuple< int, int > > {{neuronName}}::rport_to_nestml_buffer_idx
{%- if paired_synapse is defined %}
n_incoming_ = __n.n_incoming_;
max_delay_ = __n.max_delay_;
last_spike_ = __n.last_spike_;
last_spike_nestml_ = __n.last_spike_nestml_;

// cache initial values
{%- for var_name in transferred_variables %}
Expand Down Expand Up @@ -386,7 +386,7 @@ void {{neuronName}}::init_state_internal_()
// state variables for archiving state for paired synapse
n_incoming_ = 0;
max_delay_ = 0;
last_spike_ = -1.;
last_spike_nestml_ = -1.;

// cache initial values
{%- for var_name in transferred_variables %}
Expand Down Expand Up @@ -1004,19 +1004,23 @@ void
inline double
{{neuronName}}::get_spiketime_ms() const
{
return last_spike_;
return last_spike_nestml_;
}

void
{{neuronName}}::register_stdp_connection( double t_first_read, double delay )
{
{%- if neuron_parent_class in ["ArchivingNode", "Archiving_Node"] %}
{{ neuron_parent_class }}::register_stdp_connection(t_first_read, delay);
{%- endif %}

// Mark all entries in the deque, which we will not read in the future, as read by
// this input, so that we can safely increment the incoming number of
// connections afterwards without leaving spikes in the history.
// For details see bug #218. MH 08-04-22

for ( std::deque< histentry__{{neuronName}} >::iterator runner = history_.begin();
runner != history_.end() and ( t_first_read - runner->t_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() );
for ( std::deque< histentry__{{neuronName}} >::iterator runner = history_nestml_.begin();
runner != history_nestml_.end() and ( t_first_read - runner->t_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() );
++runner )
{
( runner->access_counter_ )++;
Expand All @@ -1029,26 +1033,26 @@ void


void
{{neuronName}}::get_history__( double t1,
{{neuronName}}::get_history_nestml_( double t1,
double t2,
std::deque< histentry__{{neuronName}} >::iterator* start,
std::deque< histentry__{{neuronName}} >::iterator* finish )
{
*finish = history_.end();
if ( history_.empty() )
*finish = history_nestml_.end();
if ( history_nestml_.empty() )
{
*start = *finish;
return;
}
std::deque< histentry__{{neuronName}} >::reverse_iterator runner = history_.rbegin();
std::deque< histentry__{{neuronName}} >::reverse_iterator runner = history_nestml_.rbegin();
const double t2_lim = t2 + nest::kernel().connection_manager.get_stdp_eps();
const double t1_lim = t1 + nest::kernel().connection_manager.get_stdp_eps();
while ( runner != history_.rend() and runner->t_ >= t2_lim )
while ( runner != history_nestml_.rend() and runner->t_ >= t2_lim )
{
++runner;
}
*finish = runner.base();
while ( runner != history_.rend() and runner->t_ >= t1_lim )
while ( runner != history_nestml_.rend() and runner->t_ >= t1_lim )
{
runner->access_counter_++;
++runner;
Expand Down Expand Up @@ -1076,32 +1080,32 @@ void
// STDP synapses, and
// - there is another, later spike, that is strictly more than
// (min_global_delay + max_delay_ + eps) away from the new spike (at t_sp_ms)
while ( history_.size() > 1 )
while ( history_nestml_.size() > 1 )
{
const double next_t_sp = history_[ 1 ].t_;
const double next_t_sp = history_nestml_[ 1 ].t_;
// Note that ``access_counter`` now has an extra multiplicative factor (on top of ``n_incoming_``) equal to the number of trace values that exist, so that spikes are removed from the history only after they have been read out for the sake of computing each trace.
// see https://www.frontiersin.org/files/Articles/1382/fncom-04-00141-r1/image_m/fncom-04-00141-g003.jpg (Potjans et al. 2010)

if ( history_.front().access_counter_ >= n_incoming_ * num_transferred_variables
if ( history_nestml_.front().access_counter_ >= n_incoming_ * num_transferred_variables
and t_sp_ms - next_t_sp > max_delay_ + nest::Time::delay_steps_to_ms(nest::kernel().connection_manager.get_min_delay()) + nest::kernel().connection_manager.get_stdp_eps() )
{
history_.pop_front();
history_nestml_.pop_front();
}
else
{
break;
}
}

if (history_.size() > 0)
if (history_nestml_.size() > 0)
{
assert(history_.back().t_ == last_spike_);
assert(history_nestml_.back().t_ == last_spike_nestml_);

{%- for var in purely_numeric_state_variables_moved|sort %}
{{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_.back().{{var}}_;
{{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_nestml_.back().{{var}}_;
{%- endfor %}
{%- for var in analytic_state_variables_moved|sort %}
{{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_.back().{{var}}_;
{{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_nestml_.back().{{var}}_;
{%- endfor %}
}
else
Expand All @@ -1115,13 +1119,13 @@ void
}

/**
* update state variables transferred from synapse from `last_spike_` to `t_sp_ms`
* update state variables transferred from synapse from `last_spike_nestml_` to `t_sp_ms`
*
* variables that will be integrated: {{ purely_numeric_state_variables_moved + analytic_state_variables_moved }}
**/

const double old___h = V_.__h;
V_.__h = t_sp_ms - last_spike_;
V_.__h = t_sp_ms - last_spike_nestml_;
if (V_.__h > 1E-12)
{
recompute_internal_variables(true);
Expand Down Expand Up @@ -1155,8 +1159,8 @@ void
{{ printer.print(utils.get_variable_by_name(astnode, spike_update.get_variable().get_complete_name())) }} += 1.;
{%- endfor %}

last_spike_ = t_sp_ms;
history_.push_back( histentry__{{neuronName}}( last_spike_
last_spike_nestml_ = t_sp_ms;
history_nestml_.push_back( histentry__{{neuronName}}( last_spike_nestml_
{%- for var in purely_numeric_state_variables_moved|sort %}
, get_{{var}}()
{%- endfor %}
Expand All @@ -1168,16 +1172,20 @@ void
}
else
{
last_spike_ = t_sp_ms;
last_spike_nestml_ = t_sp_ms;
}
}


void
{{neuronName}}::clear_history()
{
last_spike_ = -1.0;
history_.clear();
{%- if neuron_parent_class in ["ArchivingNode", "Archiving_Node"] %}
{{ neuron_parent_class }}::clear_history();
{%- endif %}

last_spike_nestml_ = -1.0;
history_nestml_.clear();
}


Expand All @@ -1200,7 +1208,7 @@ double
#endif

// case when the neuron has not yet spiked
if ( history_.empty() )
if ( history_nestml_.empty() )
{
#ifdef DEBUG
std::cout << "{{neuronName}}::get_{{var}}: \thistory empty, returning initial value = " << {{var}}__iv << std::endl;
Expand All @@ -1210,25 +1218,25 @@ double
}

// search for the latest post spike in the history buffer that came strictly before `t`
int i = history_.size() - 1;
int i = history_nestml_.size() - 1;
double eps = 0.;
if ( before_increment )
{
eps = nest::kernel().connection_manager.get_stdp_eps();
}
while ( i >= 0 )
{
if ( t - history_[ i ].t_ >= eps )
if ( t - history_nestml_[ i ].t_ >= eps )
{
#ifdef DEBUG
std::cout<<"{{neuronName}}::get_{{var}}: \tspike occurred at history[i].t_ = " << history_[i].t_ << std::endl;
std::cout<<"{{neuronName}}::get_{{var}}: \tspike occurred at history[i].t_ = " << history_nestml_[i].t_ << std::endl;
#endif

{%- for var_ in purely_numeric_state_variables_moved %}
{{ printer.print(utils.get_variable_by_name(astnode, var_)) }} = history_[ i ].{{var_}}_;
{{ printer.print(utils.get_variable_by_name(astnode, var_)) }} = history_nestml_[ i ].{{var_}}_;
{%- endfor %}
{%- for var_ in analytic_state_variables_moved %}
{{ printer.print(utils.get_variable_by_name(astnode, var_)) }} = history_[ i ].{{var_}}_;
{{ printer.print(utils.get_variable_by_name(astnode, var_)) }} = history_nestml_[ i ].{{var_}}_;
{%- endfor %}

/**
Expand All @@ -1237,10 +1245,10 @@ double
* variables that will be integrated: {{ purely_numeric_state_variables_moved + analytic_state_variables_moved }}
**/

if ( t - history_[ i ].t_ >= nest::kernel().connection_manager.get_stdp_eps() )
if ( t - history_nestml_[ i ].t_ >= nest::kernel().connection_manager.get_stdp_eps() )
{
const double old___h = V_.__h;
V_.__h = t - history_[i].t_;
V_.__h = t - history_nestml_[i].t_;
assert(V_.__h > 0);
recompute_internal_variables(true);

Expand All @@ -1262,13 +1270,13 @@ double
}

// this case occurs when the trace was requested at a time precisely at that of the first spike in the history
if ( (!before_increment) and t == history_[ 0 ].t_)
if ( (!before_increment) and t == history_nestml_[ 0 ].t_)
{
{%- for var_ in purely_numeric_state_variables_moved %}
{{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_[ 0 ].{{var_}}_;
{{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_nestml_[ 0 ].{{var_}}_;
{%- endfor %}
{%- for var_ in analytic_state_variables_moved %}
{{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_[ 0 ].{{var_}}_;
{{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_nestml_[ 0 ].{{var_}}_;
{%- endfor %}

#ifdef DEBUG
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -321,14 +321,9 @@ public:
// support for spike archiving

/**
* \fn void get_history(long t1, long t2,
* std::deque<Archiver::histentry__>::iterator* start,
* std::deque<Archiver::histentry__>::iterator* finish)
* return the spike times (in steps) of spikes which occurred in the range
* (t1,t2].
* XXX: two underscores to differentiate it from nest::Node::get_history()
* Return the spike times (in steps) of spikes which occurred in the range (t1,t2].
*/
void get_history__( double t1,
void get_history_nestml_( double t1,
double t2,
std::deque< histentry__{{neuronName}} >::iterator* start,
std::deque< histentry__{{neuronName}} >::iterator* finish );
Expand Down Expand Up @@ -445,10 +440,10 @@ private:

double max_delay_;

double last_spike_;
double last_spike_nestml_;

// spiking history needed by stdp synapses
std::deque< histentry__{{neuronName}} > history_;
std::deque< histentry__{{neuronName}} > history_nestml_;

// cache for initial values
{%- for var in transferred_variables %}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -723,7 +723,7 @@ public:
// history[0, ..., t_last_spike - dendritic_delay] have been
// incremented by Archiving_Node::register_stdp_connection(). See bug #218 for
// details.
__target->get_history__( t_lastspike_ - __dendritic_delay,
__target->get_history_nestml_( t_lastspike_ - __dendritic_delay,
__t_spike - __dendritic_delay,
&start,
&finish );
Expand Down Expand Up @@ -1269,7 +1269,7 @@ inline void
// get spike history in relevant range (t_last_update, t_trig] from postsyn. neuron
std::deque< histentry__{{paired_neuron}} >::iterator start;
std::deque< histentry__{{paired_neuron}} >::iterator finish;
static_cast<{{paired_neuron}}*>(get_target(t))->get_history__( t_last_update_ - dendritic_delay, t_trig - dendritic_delay, &start, &finish );
static_cast<{{paired_neuron}}*>(get_target(t))->get_history_nestml_( t_last_update_ - dendritic_delay, t_trig - dendritic_delay, &start, &finish );

// facilitation due to postsyn. spikes since last update
double t0 = t_last_update_;
Expand Down
Loading