diff --git a/doc/running/index.rst b/doc/running/index.rst
index 9d5c50305..390b675d1 100644
--- a/doc/running/index.rst
+++ b/doc/running/index.rst
@@ -4,8 +4,18 @@ Running NESTML
Running NESTML causes several processing steps to occur:
1. The model is parsed from file and checked (syntax, consistent physical units, and so on).
-2. Code is generated from the model by one of the "code generators" selected when NESTML was invoked.
-3. If necessary, the code is compiled and built by the "builder" that belongs to the selected code generator.
+2. A series of transformation steps are optionally carried out on the model. Which set of transformers is used is automatically determined based on the target platform selected when NESTML was invoked. (See the section "Available transformers" below.)
+3. Code is generated from the model by one of the "code generators" selected when NESTML was invoked. If necessary, the code is compiled and built by the "builder" that belongs to the selected code generator. (See the section "Supported target platforms" below.)
+
+
+Available transformers
+----------------------
+
+- **Non-dimensionalisation transformer**
+
+ :class:`pynestml.transformers.non_dimensionalisation_transformer.NonDimensionalisationTransformer`
+
+ Eliminates physical units from models, converting numerical values to real numbers.
Supported target platforms
diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml
index f7927871b..54f9049b8 100644
--- a/models/neurons/aeif_cond_exp_neuron.nestml
+++ b/models/neurons/aeif_cond_exp_neuron.nestml
@@ -1,140 +1,7 @@
-# aeif_cond_exp - Conductance based exponential integrate-and-fire neuron model
-# #############################################################################
-#
-# Description
-# +++++++++++
-#
-# aeif_cond_exp is the adaptive exponential integrate and fire neuron
-# according to Brette and Gerstner (2005), with post-synaptic
-# conductances in the form of truncated exponentials.
-#
-# The membrane potential is given by the following differential equation:
-#
-# .. math::
-#
-# C_m \frac{dV_m}{dt} =
-# -g_L(V_m-E_L)+g_L\Delta_T\exp\left(\frac{V_m-V_{th}}{\Delta_T}\right) - g_e(t)(V_m-E_e) \\
-# -g_i(t)(V_m-E_i)-w +I_e
-#
-# and
-#
-# .. math::
-#
-# \tau_w \frac{dw}{dt} = a(V_m-E_L) - w
-#
-# Note that the membrane potential can diverge to positive infinity due to the exponential term. To avoid numerical instabilities, instead of :math:`V_m`, the value :math:`\min(V_m,V_{peak})` is used in the dynamical equations.
-#
-# .. note::
-#
-# The default refractory period for ``aeif`` models is zero, consistent with the model definition in
-# Brette & Gerstner [1]_. Thus, an ``aeif`` neuron with default parameters can fire multiple spikes in a single
-# time step, which can lead to exploding spike numbers and extreme slow-down of simulations.
-# To avoid such unphysiological behavior, you should set a refractory time ``refr_t > 0``.
-#
-#
-# References
-# ++++++++++
-#
-# .. [1] Brette R and Gerstner W (2005). Adaptive exponential
-# integrate-and-fire model as an effective description of neuronal
-# activity. Journal of Neurophysiology. 943637-3642
-# DOI: https://doi.org/10.1152/jn.00686.2005
-#
-#
-# See also
-# ++++++++
-#
-# iaf_cond_exp, aeif_cond_alpha
-#
-#
-# Copyright statement
-# +++++++++++++++++++
-#
-# This file is part of NEST.
-#
-# Copyright (C) 2004 The NEST Initiative
-#
-# NEST is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 2 of the License, or
-# (at your option) any later version.
-#
-# NEST is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with NEST. If not, see .
-#
-#
model aeif_cond_exp_neuron:
-
state:
- V_m mV = E_L # Membrane potential
- w pA = 0 pA # Spike-adaptation current
- refr_t ms = 0 ms # Refractory period timer
-
- equations:
- inline V_bounded mV = min(V_m, V_peak) # prevent exponential divergence
- kernel g_inh = exp(-t / tau_syn_inh)
- kernel g_exc = exp(-t / tau_syn_exc)
-
- # Add inlines to simplify the equation definition of V_m
- inline exp_arg real = (V_bounded - V_th) / Delta_T
- inline I_spike pA = g_L * Delta_T * exp(exp_arg)
- inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * (V_bounded - E_exc)
- inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * (V_bounded - E_inh)
-
- V_m' = (-g_L * (V_bounded - E_L) + I_spike - I_syn_exc - I_syn_inh - w + I_e + I_stim) / C_m
- w' = (a * (V_bounded - E_L) - w) / tau_w
-
- refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984)
+ V_m V = E_L # Membrane potential
parameters:
- # membrane parameters
- C_m pF = 281.0 pF # Membrane Capacitance
- refr_T ms = 2 ms # Duration of refractory period
- V_reset mV = -60.0 mV # Reset Potential
- g_L nS = 30.0 nS # Leak Conductance
- E_L mV = -70.6 mV # Leak reversal Potential (aka resting potential)
-
- # spike adaptation parameters
- a nS = 4 nS # Subthreshold adaptation
- b pA = 80.5 pA # Spike-triggered adaptation
- Delta_T mV = 2.0 mV # Slope factor
- tau_w ms = 144.0 ms # Adaptation time constant
- V_th mV = -50.4 mV # Spike initiation threshold
- V_peak mV = 0 mV # Spike detection threshold
-
- # synaptic parameters
- E_exc mV = 0 mV # Excitatory reversal Potential
- tau_syn_exc ms = 0.2 ms # Synaptic Time Constant Excitatory Synapse
- E_inh mV = -85.0 mV # Inhibitory reversal Potential
- tau_syn_inh ms = 2.0 ms # Synaptic Time Constant for Inhibitory Synapse
-
- # constant external input current
- I_e pA = 0 pA
-
- input:
- inh_spikes <- inhibitory spike
- exc_spikes <- excitatory spike
- I_stim pA <- continuous
-
- output:
- spike
-
- update:
- if refr_t > 0 ms:
- # neuron is absolute refractory, do not evolve V_m
- integrate_odes(w, refr_t)
- else:
- # neuron not refractory
- integrate_odes(w, V_m)
-
- onCondition(refr_t <= 0 ms and V_m >= V_peak):
- # threshold crossing
- refr_t = refr_T # start of the refractory period
- V_m = V_reset # clamp potential
- w += b
- emit_spike()
+ E_L V = -70.6E3 uV # Leak reversal Potential (aka resting potential)
+ foo V = 1000 * V_m # Leak reversal Potential (aka resting potential)
diff --git a/pynestml/cocos/co_co_function_calls_consistent.py b/pynestml/cocos/co_co_function_calls_consistent.py
index 2b9baa544..e993240f3 100644
--- a/pynestml/cocos/co_co_function_calls_consistent.py
+++ b/pynestml/cocos/co_co_function_calls_consistent.py
@@ -28,7 +28,6 @@
from pynestml.utils.ast_utils import ASTUtils
from pynestml.utils.logger import Logger, LoggingLevel
from pynestml.utils.messages import Messages
-from pynestml.utils.type_caster import TypeCaster
from pynestml.visitors.ast_visitor import ASTVisitor
@@ -109,5 +108,5 @@ def visit_function_call(self, node):
# variadic type symbol accepts anything
return
- if not actual_type.equals(expected_type) and not isinstance(expected_type, TemplateTypeSymbol):
- TypeCaster.try_to_recover_or_error(expected_type, actual_type, actual_arg)
+ # if not actual_type.equals(expected_type) and not isinstance(expected_type, TemplateTypeSymbol):
+ # TypeCaster.try_to_recover_or_error(expected_type, actual_type, actual_arg)
diff --git a/pynestml/cocos/co_co_illegal_expression.py b/pynestml/cocos/co_co_illegal_expression.py
index c362d0dc5..41e431af4 100644
--- a/pynestml/cocos/co_co_illegal_expression.py
+++ b/pynestml/cocos/co_co_illegal_expression.py
@@ -28,7 +28,6 @@
from pynestml.utils.logger import LoggingLevel, Logger
from pynestml.utils.logging_helper import LoggingHelper
from pynestml.utils.messages import Messages
-from pynestml.utils.type_caster import TypeCaster
from pynestml.visitors.ast_visitor import ASTVisitor
@@ -68,9 +67,8 @@ def visit_declaration(self, node):
if isinstance(rhs_type, ErrorTypeSymbol):
LoggingHelper.drop_missing_type_error(node)
return
- if self.__types_do_not_match(lhs_type, rhs_type):
- TypeCaster.try_to_recover_or_error(lhs_type, rhs_type, node.get_expression())
- return
+ # if self.__types_do_not_match(lhs_type, rhs_type):
+ # TypeCaster.try_to_recover_or_error(lhs_type, rhs_type, node.get_expression())
def visit_inline_expression(self, node):
"""
@@ -82,8 +80,8 @@ def visit_inline_expression(self, node):
if isinstance(rhs_type, ErrorTypeSymbol):
LoggingHelper.drop_missing_type_error(node)
return
- if self.__types_do_not_match(lhs_type, rhs_type):
- TypeCaster.try_to_recover_or_error(lhs_type, rhs_type, node.get_expression())
+ # if self.__types_do_not_match(lhs_type, rhs_type):
+ # TypeCaster.try_to_recover_or_error(lhs_type, rhs_type, node.get_expression())
def visit_assignment(self, node):
"""
@@ -120,23 +118,23 @@ def handle_compound_assignment(self, node):
lhs_type_symbol = lhs_variable_symbol.get_type_symbol()
if node.is_compound_product:
- if self.__types_do_not_match(lhs_type_symbol, lhs_type_symbol * rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_type_symbol, lhs_type_symbol * rhs_type_symbol,
- node.get_expression())
- return
+ # if self.__types_do_not_match(lhs_type_symbol, lhs_type_symbol * rhs_type_symbol):
+ # TypeCaster.try_to_recover_or_error(lhs_type_symbol, lhs_type_symbol * rhs_type_symbol,
+ # node.get_expression())
+ # return
return
if node.is_compound_quotient:
- if self.__types_do_not_match(lhs_type_symbol, lhs_type_symbol / rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_type_symbol, lhs_type_symbol / rhs_type_symbol,
- node.get_expression())
- return
+ # if self.__types_do_not_match(lhs_type_symbol, lhs_type_symbol / rhs_type_symbol):
+ # TypeCaster.try_to_recover_or_error(lhs_type_symbol, lhs_type_symbol / rhs_type_symbol,
+ # node.get_expression())
+ # return
return
assert node.is_compound_sum or node.is_compound_minus
- if self.__types_do_not_match(lhs_type_symbol, rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_type_symbol, rhs_type_symbol,
- node.get_expression())
+ # if self.__types_do_not_match(lhs_type_symbol, rhs_type_symbol):
+ # TypeCaster.try_to_recover_or_error(lhs_type_symbol, rhs_type_symbol,
+ # node.get_expression())
@staticmethod
def __types_do_not_match(lhs_type_symbol, rhs_type_symbol):
@@ -154,11 +152,10 @@ def handle_simple_assignment(self, node):
LoggingHelper.drop_missing_type_error(node)
return
- if lhs_variable_symbol is not None and self.__types_do_not_match(lhs_variable_symbol.get_type_symbol(),
- rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_variable_symbol.get_type_symbol(), rhs_type_symbol,
- node.get_expression())
- return
+ # if lhs_variable_symbol is not None and self.__types_do_not_match(lhs_variable_symbol.get_type_symbol(),
+ # rhs_type_symbol):
+ # TypeCaster.try_to_recover_or_error(lhs_variable_symbol.get_type_symbol(), rhs_type_symbol,
+ # node.get_expression())
def visit_if_clause(self, node):
"""
diff --git a/pynestml/cocos/co_co_output_port_defined_if_emit_call.py b/pynestml/cocos/co_co_output_port_defined_if_emit_call.py
index e3c1cd10b..74e74bc0d 100644
--- a/pynestml/cocos/co_co_output_port_defined_if_emit_call.py
+++ b/pynestml/cocos/co_co_output_port_defined_if_emit_call.py
@@ -104,6 +104,7 @@ def visit_function_call(self, node: ASTFunctionCall):
# types are not equal, but castable
code, message = Messages.get_implicit_cast_rhs_to_lhs(output_block_attr_type_sym.print_symbol(),
emit_spike_arg_type_sym.print_symbol())
+ # XXX: this should be removed
Logger.log_message(error_position=output_blocks[0].get_source_position(),
code=code, message=message, log_level=LoggingLevel.WARNING)
continue
diff --git a/pynestml/cocos/co_co_user_defined_function_correctly_defined.py b/pynestml/cocos/co_co_user_defined_function_correctly_defined.py
index ec62a9ac4..bfc61b843 100644
--- a/pynestml/cocos/co_co_user_defined_function_correctly_defined.py
+++ b/pynestml/cocos/co_co_user_defined_function_correctly_defined.py
@@ -28,7 +28,6 @@
from pynestml.symbols.symbol import SymbolKind
from pynestml.utils.logger import LoggingLevel, Logger
from pynestml.utils.messages import Messages
-from pynestml.utils.type_caster import TypeCaster
class CoCoUserDefinedFunctionCorrectlyDefined(CoCo):
@@ -128,9 +127,9 @@ def __check_return_recursively(cls, type_symbol=None, stmts=None, ret_defined=Fa
code, message = Messages.get_type_could_not_be_derived(cls.processed_function.get_name())
Logger.log_message(error_position=stmt.get_source_position(),
code=code, message=message, log_level=LoggingLevel.ERROR)
- elif not type_of_return.equals(type_symbol):
- TypeCaster.try_to_recover_or_error(type_symbol, type_of_return,
- stmt.get_return_stmt().get_expression())
+ # elif not type_of_return.equals(type_symbol):
+ # TypeCaster.try_to_recover_or_error(type_symbol, type_of_return,
+ # stmt.get_return_stmt().get_expression())
elif isinstance(stmt, ASTCompoundStmt):
# otherwise it is a compound stmt, thus check recursively
if stmt.is_if_stmt():
diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py
index 066109e3d..ae1afa529 100644
--- a/pynestml/codegeneration/nest_code_generator.py
+++ b/pynestml/codegeneration/nest_code_generator.py
@@ -18,7 +18,6 @@
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see .
-
from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple
import datetime
@@ -62,6 +61,7 @@
from pynestml.symbols.unit_type_symbol import UnitTypeSymbol
from pynestml.symbols.symbol import SymbolKind
from pynestml.transformers.inline_expression_expansion_transformer import InlineExpressionExpansionTransformer
+from pynestml.transformers.non_dimensionalisation_transformer import NonDimensionalisationTransformer
from pynestml.transformers.synapse_post_neuron_transformer import SynapsePostNeuronTransformer
from pynestml.utils.ast_utils import ASTUtils
from pynestml.utils.logger import Logger
@@ -413,6 +413,24 @@ def analyse_neuron(self, neuron: ASTModel) -> Tuple[Dict[str, ASTAssignment], Di
spike_updates, post_spike_updates = self.get_spike_update_expressions(neuron, kernel_buffers, [analytic_solver, numeric_solver], delta_factors)
+ # update expressions are processed by the transformer
+ for var_name, spike_updates_for_var in spike_updates.items():
+ transformed_spike_update_exprs = []
+ for spike_update_expr in spike_updates_for_var:
+ transformed_spike_update_exprs.append(NonDimensionalisationTransformer({"quantity_to_preferred_prefix": {"electrical potential": "m",
+ "electrical current": "p",
+ "electrical capacitance": "p",
+ "electrical resistance": "M",
+ "electrical conductance": "n",
+ "time": "m"}}).transform([spike_update_expr])[0])
+
+ spike_updates[var_name] = transformed_spike_update_exprs
+
+
+
+
+ # post_spike_updates = NonDimensionalisationTransformer().transform(post_spike_updates)
+
return spike_updates, post_spike_updates, equations_with_delay_vars, equations_with_vector_vars
def analyse_synapse(self, synapse: ASTModel) -> Dict[str, ASTAssignment]:
diff --git a/pynestml/codegeneration/nest_unit_converter.py b/pynestml/codegeneration/nest_unit_converter.py
deleted file mode 100644
index d8d88e2c5..000000000
--- a/pynestml/codegeneration/nest_unit_converter.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# nest_unit_converter.py
-#
-# This file is part of NEST.
-#
-# Copyright (C) 2004 The NEST Initiative
-#
-# NEST is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 2 of the License, or
-# (at your option) any later version.
-#
-# NEST is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with NEST. If not, see .
-
-from astropy import units
-
-
-class NESTUnitConverter:
- r"""
- NEST Simulator uses a set of default physical units internally. This class calculates the factor needed to convert any given physical unit to its NEST counterpart.
- """
-
- @classmethod
- def get_factor(cls, unit: units.UnitBase) -> float:
- """
- Gives a factor for a given unit that transforms it to a "neuroscience" scale. If the given unit is not listed as a neuroscience unit, the factor is 1.
-
- :param unit: an astropy unit
- :type unit: IrreducibleUnit or Unit or CompositeUnit
- :return: a factor to that unit, converting it to "neuroscience" scales.
- """
- assert (isinstance(unit, units.IrreducibleUnit) or isinstance(unit, units.CompositeUnit)
- or isinstance(unit, units.Unit) or isinstance(unit, units.PrefixUnit)), \
- "UnitConverter: given parameter is not a unit (%s)!" % type(unit)
-
- # check if it is dimensionless, thus only a prefix
- if unit.physical_type == 'dimensionless':
- return unit.si
-
- # otherwise check if it is one of the base units
- target_unit = None
- if unit.physical_type == 'electrical conductance':
- target_unit = units.nS
-
- if unit.physical_type == 'electrical resistance':
- target_unit = units.Gohm
-
- if unit.physical_type == 'time':
- target_unit = units.ms
-
- if unit.physical_type == 'electrical capacitance':
- target_unit = units.pF
-
- if unit.physical_type == 'electrical potential':
- target_unit = units.mV
-
- if unit.physical_type == 'electrical current':
- target_unit = units.pA
-
- if target_unit is not None:
- return (unit / target_unit).si.scale
-
- if unit == unit.bases[0] and len(unit.bases) == 1:
- # this case means that we stuck in a recursive definition
- # just return the factor 1.0
- return 1.0
-
- # now if it is not a base unit, it has to be a combined one, e.g. s**2, decompose it
- factor = 1.0
- for i in range(0, len(unit.bases)):
- factor *= cls.get_factor(unit.bases[i]) ** unit.powers[i]
- return factor
diff --git a/pynestml/codegeneration/printers/c_simple_expression_printer.py b/pynestml/codegeneration/printers/c_simple_expression_printer.py
index 7b2ccf748..2221e67ab 100644
--- a/pynestml/codegeneration/printers/c_simple_expression_printer.py
+++ b/pynestml/codegeneration/printers/c_simple_expression_printer.py
@@ -71,7 +71,4 @@ def _print(self, node: ASTNode) -> str:
return self.print_simple_expression(node)
def print(self, node: ASTNode) -> str:
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self._print(node) + "))"
-
return self._print(node)
diff --git a/pynestml/codegeneration/printers/cpp_expression_printer.py b/pynestml/codegeneration/printers/cpp_expression_printer.py
index e392a35b6..44f54bcce 100644
--- a/pynestml/codegeneration/printers/cpp_expression_printer.py
+++ b/pynestml/codegeneration/printers/cpp_expression_printer.py
@@ -39,9 +39,6 @@ class CppExpressionPrinter(ExpressionPrinter):
def print(self, node: ASTNode) -> str:
if isinstance(node, ASTExpression):
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self.print_expression(node) + "))"
-
return self.print_expression(node)
return self._simple_expression_printer.print(node)
diff --git a/pynestml/codegeneration/printers/gsl_variable_printer.py b/pynestml/codegeneration/printers/gsl_variable_printer.py
index c9cfbc46f..26903ca4b 100644
--- a/pynestml/codegeneration/printers/gsl_variable_printer.py
+++ b/pynestml/codegeneration/printers/gsl_variable_printer.py
@@ -18,7 +18,7 @@
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see .
-from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter
+
from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter
from pynestml.meta_model.ast_variable import ASTVariable
from pynestml.symbols.predefined_units import PredefinedUnits
@@ -45,7 +45,7 @@ def print_variable(self, node: ASTVariable) -> str:
if symbol is None:
# test if variable name can be resolved to a type
if PredefinedUnits.is_unit(node.get_complete_name()):
- return str(NESTUnitConverter.get_factor(PredefinedUnits.get_unit(node.get_complete_name()).get_unit()))
+ return str(PredefinedUnits.get_unit(node.get_complete_name()).get_unit())
code, message = Messages.get_could_not_resolve(node.get_name())
Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message,
diff --git a/pynestml/codegeneration/printers/latex_expression_printer.py b/pynestml/codegeneration/printers/latex_expression_printer.py
index 79b04a0d0..14cd54a74 100644
--- a/pynestml/codegeneration/printers/latex_expression_printer.py
+++ b/pynestml/codegeneration/printers/latex_expression_printer.py
@@ -35,10 +35,6 @@ class LatexExpressionPrinter(ExpressionPrinter):
"""
def print(self, node: ASTExpressionNode) -> str:
- if node.get_implicit_conversion_factor() is not None \
- and str(node.get_implicit_conversion_factor()) not in ["1.", "1.0", "1"]:
- return str(node.get_implicit_conversion_factor()) + " * (" + self.print_expression(node) + ")"
-
return self.print_expression(node)
def print_expression(self, node: ASTExpressionNode) -> str:
diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py
index 5f7e9fbfa..ac7e3b6aa 100644
--- a/pynestml/codegeneration/printers/nest_variable_printer.py
+++ b/pynestml/codegeneration/printers/nest_variable_printer.py
@@ -26,7 +26,6 @@
from pynestml.codegeneration.nest_code_generator_utils import NESTCodeGeneratorUtils
from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter
from pynestml.codegeneration.printers.expression_printer import ExpressionPrinter
-from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter
from pynestml.meta_model.ast_external_variable import ASTExternalVariable
from pynestml.meta_model.ast_variable import ASTVariable
from pynestml.symbols.predefined_units import PredefinedUnits
@@ -99,7 +98,7 @@ def print_variable(self, variable: ASTVariable) -> str:
if symbol is None:
# test if variable name can be resolved to a type
if PredefinedUnits.is_unit(variable.get_complete_name()):
- return str(NESTUnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit()))
+ return str(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit())
code, message = Messages.get_could_not_resolve(variable.get_name())
Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message,
@@ -111,18 +110,11 @@ def print_variable(self, variable: ASTVariable) -> str:
vector_param = "[" + self._expression_printer.print(variable.get_vector_parameter()) + "]"
if symbol.is_buffer():
- if isinstance(symbol.get_type_symbol(), UnitTypeSymbol):
- units_conversion_factor = NESTUnitConverter.get_factor(symbol.get_type_symbol().unit.unit)
- else:
- units_conversion_factor = 1
s = ""
- if not units_conversion_factor == 1:
- s += "(" + str(units_conversion_factor) + " * "
if self.cpp_variable_suffix == "":
s += "B_."
s += self._print_buffer_value(variable)
- if not units_conversion_factor == 1:
- s += ")"
+
return s
if symbol.is_inline_expression:
diff --git a/pynestml/codegeneration/printers/nestml_expression_printer.py b/pynestml/codegeneration/printers/nestml_expression_printer.py
index f5795cde1..8880be6a8 100644
--- a/pynestml/codegeneration/printers/nestml_expression_printer.py
+++ b/pynestml/codegeneration/printers/nestml_expression_printer.py
@@ -36,9 +36,6 @@ class NESTMLExpressionPrinter(ExpressionPrinter):
def print(self, node: ASTNode) -> str:
if isinstance(node, ASTExpression):
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self.print_expression(node) + "))"
-
return self.print_expression(node)
if isinstance(node, ASTArithmeticOperator):
diff --git a/pynestml/codegeneration/printers/nestml_simple_expression_printer.py b/pynestml/codegeneration/printers/nestml_simple_expression_printer.py
index 8198c239a..fef554d0a 100644
--- a/pynestml/codegeneration/printers/nestml_simple_expression_printer.py
+++ b/pynestml/codegeneration/printers/nestml_simple_expression_printer.py
@@ -41,9 +41,6 @@ def _print(self, node: ASTNode) -> str:
return self.print_simple_expression(node)
def print(self, node: ASTNode) -> str:
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self._print(node) + "))"
-
return self._print(node)
def print_simple_expression(self, node: ASTSimpleExpression) -> str:
diff --git a/pynestml/codegeneration/printers/python_expression_printer.py b/pynestml/codegeneration/printers/python_expression_printer.py
index c129db26b..6b040a0b6 100644
--- a/pynestml/codegeneration/printers/python_expression_printer.py
+++ b/pynestml/codegeneration/printers/python_expression_printer.py
@@ -36,9 +36,6 @@ class PythonExpressionPrinter(ExpressionPrinter):
def print(self, node: ASTNode) -> str:
if isinstance(node, ASTExpression):
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self.print_expression(node) + "))"
-
return self.print_expression(node)
return self._simple_expression_printer.print(node)
diff --git a/pynestml/codegeneration/printers/python_simple_expression_printer.py b/pynestml/codegeneration/printers/python_simple_expression_printer.py
index e9ecde84e..370b4ca3a 100644
--- a/pynestml/codegeneration/printers/python_simple_expression_printer.py
+++ b/pynestml/codegeneration/printers/python_simple_expression_printer.py
@@ -75,7 +75,4 @@ def _print(self, node: ASTNode) -> str:
return self.print_simple_expression(node)
def print(self, node: ASTNode) -> str:
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self._print(node) + "))"
-
return self._print(node)
diff --git a/pynestml/codegeneration/printers/python_variable_printer.py b/pynestml/codegeneration/printers/python_variable_printer.py
index d03bdadd0..01895c399 100644
--- a/pynestml/codegeneration/printers/python_variable_printer.py
+++ b/pynestml/codegeneration/printers/python_variable_printer.py
@@ -21,7 +21,6 @@
from __future__ import annotations
-from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter
from pynestml.codegeneration.printers.expression_printer import ExpressionPrinter
from pynestml.codegeneration.printers.variable_printer import VariablePrinter
from pynestml.codegeneration.python_code_generator_utils import PythonCodeGeneratorUtils
@@ -93,7 +92,7 @@ def print_variable(self, variable: ASTVariable) -> str:
if symbol is None:
# test if variable name can be resolved to a type
if PredefinedUnits.is_unit(variable.get_complete_name()):
- return str(NESTUnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit()))
+ return str(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit())
code, message = Messages.get_could_not_resolve(variable.get_name())
Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message,
@@ -105,17 +104,9 @@ def print_variable(self, variable: ASTVariable) -> str:
vector_param = "[" + self._expression_printer.print(variable.get_vector_parameter()) + "]"
if symbol.is_buffer():
- if isinstance(symbol.get_type_symbol(), UnitTypeSymbol):
- units_conversion_factor = NESTUnitConverter.get_factor(symbol.get_type_symbol().unit.unit)
- else:
- units_conversion_factor = 1
s = ""
- if not units_conversion_factor == 1:
- s += "(" + str(units_conversion_factor) + " * "
s += self._print(variable, symbol, with_origin=self.with_origin) + vector_param
s += vector_param
- if not units_conversion_factor == 1:
- s += ")"
return s
if symbol.is_inline_expression:
diff --git a/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py b/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py
index b9ba5dfc1..87ed79529 100644
--- a/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py
+++ b/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py
@@ -26,7 +26,6 @@
from pynestml.codegeneration.spinnaker_code_generator_utils import SPINNAKERCodeGeneratorUtils
from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter
from pynestml.codegeneration.printers.expression_printer import ExpressionPrinter
-from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter
from pynestml.meta_model.ast_external_variable import ASTExternalVariable
from pynestml.meta_model.ast_variable import ASTVariable
from pynestml.symbols.predefined_units import PredefinedUnits
@@ -70,7 +69,7 @@ def print_variable(self, variable: ASTVariable) -> str:
if symbol is None:
# test if variable name can be resolved to a type
if PredefinedUnits.is_unit(variable.get_complete_name()):
- return str(NESTUnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit()))
+ return str(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit())
code, message = Messages.get_could_not_resolve(variable.get_name())
Logger.log_message(log_level=LoggingLevel.ERROR, code=code, message=message,
@@ -83,7 +82,7 @@ def print_variable(self, variable: ASTVariable) -> str:
if symbol.is_buffer():
if isinstance(symbol.get_type_symbol(), UnitTypeSymbol):
- units_conversion_factor = NESTUnitConverter.get_factor(symbol.get_type_symbol().unit.unit)
+ units_conversion_factor = symbol.get_type_symbol().unit.unit
else:
units_conversion_factor = 1
s = ""
diff --git a/pynestml/codegeneration/printers/spinnaker_python_simple_expression_printer.py b/pynestml/codegeneration/printers/spinnaker_python_simple_expression_printer.py
index 3d2c99244..1b56abea3 100644
--- a/pynestml/codegeneration/printers/spinnaker_python_simple_expression_printer.py
+++ b/pynestml/codegeneration/printers/spinnaker_python_simple_expression_printer.py
@@ -79,7 +79,4 @@ def _print(self, node: ASTNode) -> str:
return self.print_simple_expression(node)
def print(self, node: ASTNode) -> str:
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self._print(node) + "))"
-
return self._print(node)
diff --git a/pynestml/codegeneration/printers/sympy_simple_expression_printer.py b/pynestml/codegeneration/printers/sympy_simple_expression_printer.py
index bf9c64bea..adeeb7a7a 100644
--- a/pynestml/codegeneration/printers/sympy_simple_expression_printer.py
+++ b/pynestml/codegeneration/printers/sympy_simple_expression_printer.py
@@ -19,7 +19,6 @@
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see .
-from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter
from pynestml.codegeneration.printers.cpp_simple_expression_printer import CppSimpleExpressionPrinter
from pynestml.codegeneration.printers.simple_expression_printer import SimpleExpressionPrinter
from pynestml.meta_model.ast_function_call import ASTFunctionCall
@@ -45,7 +44,7 @@ def print_simple_expression(self, node: ASTSimpleExpression) -> str:
node.variable.get_complete_name(), SymbolKind.VARIABLE) is not None
if not node_is_variable_symbol and PredefinedUnits.is_unit(node.variable.get_complete_name()):
# case for a literal unit, e.g. "ms"
- return str(NESTUnitConverter.get_factor(PredefinedUnits.get_unit(node.variable.get_complete_name()).get_unit()))
+ return str(PredefinedUnits.get_unit(node.variable.get_complete_name()).get_unit())
if node.has_unit():
if self._variable_printer.print(node.get_variable()) in ["1", "1.", "1.0"]:
@@ -87,7 +86,4 @@ def _print(self, node: ASTNode) -> str:
return self.print_simple_expression(node)
def print(self, node: ASTNode) -> str:
- if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1:
- return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self._print(node) + "))"
-
return self._print(node)
diff --git a/pynestml/codegeneration/python_standalone_code_generator.py b/pynestml/codegeneration/python_standalone_code_generator.py
index 76421aa8f..304b23094 100644
--- a/pynestml/codegeneration/python_standalone_code_generator.py
+++ b/pynestml/codegeneration/python_standalone_code_generator.py
@@ -68,7 +68,7 @@ class PythonStandaloneCodeGenerator(NESTCodeGenerator):
"numeric_solver": "rk45",
"neuron_synapse_pairs": [],
"delay_variable": {},
- "weight_variable": {}
+ "weight_variable": {},
}
def __init__(self, options: Optional[Mapping[str, Any]] = None):
diff --git a/pynestml/codegeneration/python_standalone_target_tools.py b/pynestml/codegeneration/python_standalone_target_tools.py
index 32b5ad8a5..be0a26ce4 100644
--- a/pynestml/codegeneration/python_standalone_target_tools.py
+++ b/pynestml/codegeneration/python_standalone_target_tools.py
@@ -24,6 +24,8 @@
import os
import sys
import tempfile
+from pathlib import Path
+
from pynestml.frontend.frontend_configuration import FrontendConfiguration
from pynestml.frontend.pynestml_frontend import generate_python_standalone_target
@@ -36,6 +38,29 @@ class PythonStandaloneTargetTools:
r"""
Helper functions for the Python standalone target.
"""
+
+ @classmethod
+ def _dynamic_import(cls, target_path: str, module_name: str):
+ r"""
+ Dynamically imports a module from a given directory.
+ """
+
+ # Convert the path to an absolute path
+ target_path = Path(target_path).resolve()
+
+ # Add the target directory to sys.path if not already present
+ if str(target_path) not in sys.path:
+ sys.path.append(str(target_path))
+
+ # Import the module dynamically
+ try:
+ module = importlib.import_module(module_name)
+ Logger.log_message(None, -1, f"Successfully imported {module_name}", None, LoggingLevel.INFO)
+ return module # Return the imported module for use
+ except ModuleNotFoundError as e:
+ Logger.log_message(None, -1, f"Module not found Error: {e}", None, LoggingLevel.ERROR)
+ return None
+
@classmethod
def _get_model_parameters_and_state(cls, nestml_file_name: str):
suffix = ""
@@ -58,7 +83,7 @@ def _get_model_parameters_and_state(cls, nestml_file_name: str):
model_name = model.get_name()
py_module_name = os.path.basename(target_path) + "." + model_name
- module = importlib.import_module(py_module_name)
+ module = cls._dynamic_import(target_path, py_module_name)
neuron_name = "Neuron_" + model_name + "(1.0)" # 1.0 is a dummy value for the timestep
neuron = eval("module." + neuron_name)
diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/MemberInitialization.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/MemberInitialization.jinja2
index a25f4dd80..ad06a7820 100644
--- a/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/MemberInitialization.jinja2
+++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/MemberInitialization.jinja2
@@ -7,7 +7,7 @@
{%- if variable_symbol.has_vector_parameter() %}
{{ printer.print_vector_declaration(variable_symbol) }}
{%- else %}
-{{ printer.print(variable) }} = {{ printer.print(variable_symbol.get_declaring_expression()) }} # type: {{variable_symbol.get_type_symbol().print_symbol()}}
+{{ printer.print(variable) }} = {{ printer.print(astnode.get_initial_value(variable.get_complete_name())) }} # type: {{ variable_symbol.get_type_symbol().print_symbol() }}
{%- endif %}
{%- else %}
{%- if variable_symbol.has_vector_parameter() %}
diff --git a/pynestml/codegeneration/spinnaker_unit_converter.py b/pynestml/codegeneration/spinnaker_unit_converter.py
deleted file mode 100644
index fb5b12b29..000000000
--- a/pynestml/codegeneration/spinnaker_unit_converter.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# spinnaker_unit_converter.py
-#
-# This file is part of NEST.
-#
-# Copyright (C) 2004 The NEST Initiative
-#
-# NEST is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 2 of the License, or
-# (at your option) any later version.
-#
-# NEST is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with NEST. If not, see .
-
-from astropy import units
-
-
-class SpinnakerUnitConverter:
- r"""
- NEST Simulator uses a set of default physical units internally. This class calculates the factor needed to convert any given physical unit to its NEST counterpart.
- """
-
- @classmethod
- def get_factor(cls, unit: units.UnitBase) -> float:
- """
- Gives a factor for a given unit that transforms it to a "neuroscience" scale. If the given unit is not listed as a neuroscience unit, the factor is 1.
-
- :param unit: an astropy unit
- :type unit: IrreducibleUnit or Unit or CompositeUnit
- :return: a factor to that unit, converting it to "neuroscience" scales.
- """
- assert (isinstance(unit, units.IrreducibleUnit) or isinstance(unit, units.CompositeUnit)
- or isinstance(unit, units.Unit) or isinstance(unit, units.PrefixUnit)), \
- "UnitConverter: given parameter is not a unit (%s)!" % type(unit)
-
- # check if it is dimensionless, thus only a prefix
- if unit.physical_type == 'dimensionless':
- return unit.si
-
- # otherwise check if it is one of the base units
- target_unit = None
- if unit.physical_type == 'electrical conductance':
- target_unit = units.nS
-
- if unit.physical_type == 'electrical resistance':
- target_unit = units.Gohm
-
- if unit.physical_type == 'time':
- target_unit = units.ms
-
- if unit.physical_type == 'electrical capacitance':
- target_unit = units.pF
-
- if unit.physical_type == 'electrical potential':
- target_unit = units.mV
-
- if unit.physical_type == 'electrical current':
- target_unit = units.nA
-
- if target_unit is not None:
- return (unit / target_unit).si.scale
-
- if unit == unit.bases[0] and len(unit.bases) == 1:
- # this case means that we stuck in a recursive definition
- # just return the factor 1.0
- return 1.0
-
- # now if it is not a base unit, it has to be a combined one, e.g. s**2, decompose it
- factor = 1.0
- for i in range(0, len(unit.bases)):
- factor *= cls.get_factor(unit.bases[i]) ** unit.powers[i]
- return factor
diff --git a/pynestml/frontend/pynestml_frontend.py b/pynestml/frontend/pynestml_frontend.py
index 7d7cb0d66..ebbe5c289 100644
--- a/pynestml/frontend/pynestml_frontend.py
+++ b/pynestml/frontend/pynestml_frontend.py
@@ -107,6 +107,13 @@ def transformers_from_target_name(target_name: str, options: Optional[Mapping[st
options = synapse_post_neuron_co_generation.set_options(options)
transformers.append(synapse_post_neuron_co_generation)
+ if target_name.upper() in ["NEST", "PYTHON_STANDALONE"]:
+ from pynestml.transformers.non_dimensionalisation_transformer import NonDimensionalisationTransformer
+
+ non_dimensionalisation_transformer = NonDimensionalisationTransformer()
+ options = non_dimensionalisation_transformer.set_options(options)
+ transformers.append(non_dimensionalisation_transformer)
+
return transformers, options
@@ -391,7 +398,7 @@ def generate_spinnaker_target(input_path: Union[str, Sequence[str]], target_path
generate_target(input_path, target_platform="spinnaker", target_path=target_path,
install_path=install_path,
- logging_level=logging_level, module_name="nestmlmodule", store_log=store_log, suffix=suffix, dev=dev,
+ logging_level=logging_level, store_log=store_log, suffix=suffix, dev=dev,
codegen_opts=codegen_opts)
diff --git a/pynestml/meta_model/ast_arithmetic_operator.py b/pynestml/meta_model/ast_arithmetic_operator.py
index 59bc3fd04..9718b159d 100644
--- a/pynestml/meta_model/ast_arithmetic_operator.py
+++ b/pynestml/meta_model/ast_arithmetic_operator.py
@@ -38,7 +38,7 @@ class ASTArithmeticOperator(ASTNode):
is_pow_op = False # type:bool
"""
- def __init__(self, is_times_op: bool, is_div_op: bool, is_modulo_op: bool, is_plus_op: bool, is_minus_op: bool, is_pow_op: bool, *args, **kwargs):
+ def __init__(self, is_times_op: bool = False, is_div_op: bool = False, is_modulo_op: bool = False, is_plus_op: bool = False, is_minus_op: bool = False, is_pow_op: bool = False, *args, **kwargs):
super(ASTArithmeticOperator, self).__init__(*args, **kwargs)
assert ((is_times_op + is_div_op + is_modulo_op + is_plus_op + is_minus_op + is_pow_op) == 1), \
'(PyNESTML.AST.ArithmeticOperator) Type of arithmetic operator not specified!'
@@ -67,8 +67,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_assignment.py b/pynestml/meta_model/ast_assignment.py
index 83e03e3e7..940d4f86f 100644
--- a/pynestml/meta_model/ast_assignment.py
+++ b/pynestml/meta_model/ast_assignment.py
@@ -103,8 +103,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_bit_operator.py b/pynestml/meta_model/ast_bit_operator.py
index fe5af3790..a2a9c37f7 100644
--- a/pynestml/meta_model/ast_bit_operator.py
+++ b/pynestml/meta_model/ast_bit_operator.py
@@ -79,8 +79,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_block_with_variables.py b/pynestml/meta_model/ast_block_with_variables.py
index db2a48084..37d819274 100644
--- a/pynestml/meta_model/ast_block_with_variables.py
+++ b/pynestml/meta_model/ast_block_with_variables.py
@@ -94,8 +94,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_comparison_operator.py b/pynestml/meta_model/ast_comparison_operator.py
index 2c94d59e2..dc6f5a194 100644
--- a/pynestml/meta_model/ast_comparison_operator.py
+++ b/pynestml/meta_model/ast_comparison_operator.py
@@ -91,8 +91,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_compound_stmt.py b/pynestml/meta_model/ast_compound_stmt.py
index 96f497f9c..176a714ce 100644
--- a/pynestml/meta_model/ast_compound_stmt.py
+++ b/pynestml/meta_model/ast_compound_stmt.py
@@ -90,8 +90,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_data_type.py b/pynestml/meta_model/ast_data_type.py
index 07eec76c3..5ecd53367 100644
--- a/pynestml/meta_model/ast_data_type.py
+++ b/pynestml/meta_model/ast_data_type.py
@@ -99,8 +99,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_declaration.py b/pynestml/meta_model/ast_declaration.py
index f7d7aca03..ab1695ed3 100644
--- a/pynestml/meta_model/ast_declaration.py
+++ b/pynestml/meta_model/ast_declaration.py
@@ -123,8 +123,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
@@ -136,6 +135,13 @@ def get_variables(self):
"""
return self.variables
+ def set_variables(self, _variables):
+ """
+ Sets set of left-hand side variables.
+ :type: list(ASTVariables)
+ """
+ self.variables = _variables
+
def get_decorators(self):
"""
"""
@@ -149,6 +155,13 @@ def get_data_type(self):
"""
return self.data_type
+ def set_data_type(self, data_type):
+ """
+ Sets the data type.
+ :type: ASTDataType
+ """
+ self.data_type = data_type
+
def has_size_parameter(self) -> bool:
"""
Returns whether the declaration has a size parameter or not.
diff --git a/pynestml/meta_model/ast_elif_clause.py b/pynestml/meta_model/ast_elif_clause.py
index 8332409ca..d7b4f0451 100644
--- a/pynestml/meta_model/ast_elif_clause.py
+++ b/pynestml/meta_model/ast_elif_clause.py
@@ -65,8 +65,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_else_clause.py b/pynestml/meta_model/ast_else_clause.py
index a6c604107..586f119cc 100644
--- a/pynestml/meta_model/ast_else_clause.py
+++ b/pynestml/meta_model/ast_else_clause.py
@@ -57,8 +57,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_equations_block.py b/pynestml/meta_model/ast_equations_block.py
index bf6abb75f..6ff9575de 100644
--- a/pynestml/meta_model/ast_equations_block.py
+++ b/pynestml/meta_model/ast_equations_block.py
@@ -73,8 +73,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_expression.py b/pynestml/meta_model/ast_expression.py
index 6f2528b09..5a8a701e7 100644
--- a/pynestml/meta_model/ast_expression.py
+++ b/pynestml/meta_model/ast_expression.py
@@ -152,8 +152,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_external_variable.py b/pynestml/meta_model/ast_external_variable.py
index 5418ffc85..92e2cf510 100644
--- a/pynestml/meta_model/ast_external_variable.py
+++ b/pynestml/meta_model/ast_external_variable.py
@@ -55,8 +55,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
def update_alt_scope(self, scope):
self._altscope = scope
diff --git a/pynestml/meta_model/ast_for_stmt.py b/pynestml/meta_model/ast_for_stmt.py
index 191bf6cd9..6237694b9 100644
--- a/pynestml/meta_model/ast_for_stmt.py
+++ b/pynestml/meta_model/ast_for_stmt.py
@@ -85,8 +85,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_function.py b/pynestml/meta_model/ast_function.py
index 989a7dfae..8eadf9088 100644
--- a/pynestml/meta_model/ast_function.py
+++ b/pynestml/meta_model/ast_function.py
@@ -97,8 +97,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_function_call.py b/pynestml/meta_model/ast_function_call.py
index a07c3483a..019ac66cf 100644
--- a/pynestml/meta_model/ast_function_call.py
+++ b/pynestml/meta_model/ast_function_call.py
@@ -71,8 +71,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_if_clause.py b/pynestml/meta_model/ast_if_clause.py
index 999485f9b..b80c54a0f 100644
--- a/pynestml/meta_model/ast_if_clause.py
+++ b/pynestml/meta_model/ast_if_clause.py
@@ -64,8 +64,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_if_stmt.py b/pynestml/meta_model/ast_if_stmt.py
index 5d2cfcfe3..ebe9cdbce 100644
--- a/pynestml/meta_model/ast_if_stmt.py
+++ b/pynestml/meta_model/ast_if_stmt.py
@@ -84,8 +84,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_inline_expression.py b/pynestml/meta_model/ast_inline_expression.py
index b8af0f928..3e6169bce 100644
--- a/pynestml/meta_model/ast_inline_expression.py
+++ b/pynestml/meta_model/ast_inline_expression.py
@@ -89,8 +89,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_input_block.py b/pynestml/meta_model/ast_input_block.py
index d74dd8c36..afa5529a0 100644
--- a/pynestml/meta_model/ast_input_block.py
+++ b/pynestml/meta_model/ast_input_block.py
@@ -72,8 +72,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_input_port.py b/pynestml/meta_model/ast_input_port.py
index 45bc87dbb..8dc72593e 100644
--- a/pynestml/meta_model/ast_input_port.py
+++ b/pynestml/meta_model/ast_input_port.py
@@ -103,8 +103,7 @@ def clone(self) -> ASTInputPort:
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_input_qualifier.py b/pynestml/meta_model/ast_input_qualifier.py
index 6c34c33ec..0b6c280ff 100644
--- a/pynestml/meta_model/ast_input_qualifier.py
+++ b/pynestml/meta_model/ast_input_qualifier.py
@@ -68,8 +68,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_kernel.py b/pynestml/meta_model/ast_kernel.py
index e152e118f..125e90577 100644
--- a/pynestml/meta_model/ast_kernel.py
+++ b/pynestml/meta_model/ast_kernel.py
@@ -66,8 +66,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_logical_operator.py b/pynestml/meta_model/ast_logical_operator.py
index e3f3a314f..612f300c0 100644
--- a/pynestml/meta_model/ast_logical_operator.py
+++ b/pynestml/meta_model/ast_logical_operator.py
@@ -65,8 +65,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_model.py b/pynestml/meta_model/ast_model.py
index f14259c86..cea9521e2 100644
--- a/pynestml/meta_model/ast_model.py
+++ b/pynestml/meta_model/ast_model.py
@@ -21,6 +21,9 @@
from typing import List, Optional
+import astropy.units as u
+from pynestml.meta_model.ast_expression import ASTExpression
+from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression
from pynestml.meta_model.ast_block_with_variables import ASTBlockWithVariables
from pynestml.meta_model.ast_kernel import ASTKernel
from pynestml.meta_model.ast_on_condition_block import ASTOnConditionBlock
@@ -46,7 +49,7 @@ class ASTModel(ASTNode):
This class is used to stuff common to neurons and synapses
"""
- def __init__(self, name: str, body: ASTModelBody, artifact_name=None, *args, **kwargs):
+ def __init__(self, name: str, body: ASTModelBody, artifact_name=None, file_path=None, *args, **kwargs):
"""
Standard constructor.
@@ -68,6 +71,7 @@ def __init__(self, name: str, body: ASTModelBody, artifact_name=None, *args, **k
self.name = name
self.body = body
self.artifact_name = artifact_name
+ self.file_path = file_path # add file path to ast model as it's needed for use of python_standalone_target_tools with SpiNNaker2
def clone(self):
"""
@@ -79,13 +83,13 @@ def clone(self):
dup = ASTModel(name=self.name,
body=self.body.clone(),
artifact_name=self.artifact_name,
+ file_path=self.file_path,
# ASTNode common attributes:
source_position=self.source_position,
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
@@ -532,8 +536,58 @@ def get_initial_value(self, variable_name: str):
if var.get_complete_name() == variable_name:
return decl.get_expression()
+ for internals_block in self.get_internals_blocks():
+ for decl in internals_block.get_declarations():
+ for var in decl.variables:
+ if var.get_complete_name() == variable_name:
+ return decl.get_expression()
+
+ for parameters_block in self.get_parameters_blocks():
+ for decl in parameters_block.get_declarations():
+ for var in decl.variables:
+ if var.get_complete_name() == variable_name:
+ return decl.get_expression()
+
return None
+ def _to_base_value_from_string(self, quantity_str):
+ local_dict = {'u': u}
+ quantity = eval(quantity_str, {"__builtins__": {}}, local_dict)
+ canonical_unit = u.get_physical_type(quantity.unit)._unit
+ # Return the SI base value and unit name
+ return quantity.si.value, str(canonical_unit)
+
+ def get_parameter_value_dict(self) -> dict:
+ """
+ Generates a dict which maps the initial parameter values to their variable names from the parameters section
+ :param node: the neuron or synapse containing the parameter
+ :return: a dict {"parameter_names": initial_values}
+ """
+ parameters_block = self.get_parameters_blocks()[0]
+ parameter_value_dict = {}
+ for declarations in parameters_block.get_declarations():
+ if isinstance(declarations.expression, ASTSimpleExpression):
+ # declarations.variables[0].astropy_unit = None
+ # declarations.data_type = ' real'
+ if ((declarations.expression.numeric_literal.real is not None) and hasattr(declarations.expression.variable, 'name')):
+ expr = str(declarations.expression.numeric_literal) + '* u.' + declarations.expression.variable.name
+ float_value_in_si, unit_in_si = self._to_base_value_from_string(expr)
+ declarations.expression.numeric_literal = float_value_in_si
+ parameter_value_dict[declarations.variables[0].name] = float_value_in_si
+ declarations.expression.variable.name = unit_in_si
+ pass
+
+ if isinstance(declarations.expression, ASTExpression):
+ expr = str(declarations.expression.unary_operator) + str(
+ declarations.expression.expression.numeric_literal) + '* u.' + declarations.expression.expression.variable.name
+ float_value_in_si, unit_in_si = self._to_base_value_from_string(expr)
+ declarations.expression.expression.numeric_literal = abs(float_value_in_si)
+ parameter_value_dict[declarations.variables[0].name] = float_value_in_si
+ declarations.expression.expression.variable.name = unit_in_si
+ pass
+
+ return parameter_value_dict
+
def has_delay_variables(self) -> bool:
"""
This method indicates if the neuron has variables with a delay parameter.
diff --git a/pynestml/meta_model/ast_model_body.py b/pynestml/meta_model/ast_model_body.py
index 630d24b77..da6e9397f 100644
--- a/pynestml/meta_model/ast_model_body.py
+++ b/pynestml/meta_model/ast_model_body.py
@@ -72,8 +72,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_namespace_decorator.py b/pynestml/meta_model/ast_namespace_decorator.py
index 33cc63b54..d26c50672 100644
--- a/pynestml/meta_model/ast_namespace_decorator.py
+++ b/pynestml/meta_model/ast_namespace_decorator.py
@@ -47,8 +47,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_nestml_compilation_unit.py b/pynestml/meta_model/ast_nestml_compilation_unit.py
index 6e36f1fe3..a6982d523 100644
--- a/pynestml/meta_model/ast_nestml_compilation_unit.py
+++ b/pynestml/meta_model/ast_nestml_compilation_unit.py
@@ -63,8 +63,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_node.py b/pynestml/meta_model/ast_node.py
index 64234fbce..f8a302f5d 100644
--- a/pynestml/meta_model/ast_node.py
+++ b/pynestml/meta_model/ast_node.py
@@ -44,11 +44,10 @@ class ASTNode(metaclass=ABCMeta):
pre_comments = list()
in_comment = None
#
- implicit_conversion_factor = None
"""
def __init__(self, source_position: ASTSourceLocation = None, scope: Scope = None, comment: Optional[str] = None, pre_comments: Optional[List[str]] = None,
- in_comment: Optional[str] = None, implicit_conversion_factor: Optional[float] = None):
+ in_comment: Optional[str] = None):
"""
The standard constructor.
:param source_position: a source position element.
@@ -56,7 +55,6 @@ def __init__(self, source_position: ASTSourceLocation = None, scope: Scope = Non
:param comment: comment for this node
:param pre_comments: pre-comments for this node
:param in_comment: in-comment for this node
- :param implicit_conversion_factor: see set_implicit_conversion_factor()
"""
self.source_position = source_position
self.scope = scope
@@ -65,7 +63,6 @@ def __init__(self, source_position: ASTSourceLocation = None, scope: Scope = Non
pre_comments = []
self.pre_comments = pre_comments
self.in_comment = in_comment
- self.implicit_conversion_factor = implicit_conversion_factor
@abstractmethod
def clone(self):
@@ -103,22 +100,6 @@ def get_children(self) -> List[ASTNode]:
"""
pass
- def set_implicit_conversion_factor(self, implicit_factor: Optional[float]) -> None:
- """
- Sets a factor that, when applied to the (unit-typed) expression, converts it to the magnitude of the
- context where it is used. eg. Volt + milliVolt needs to either be
- 1000*Volt + milliVolt or Volt + 0.001 * milliVolt
- :param implicit_factor: the factor to be installed
- """
- self.implicit_conversion_factor = implicit_factor
-
- def get_implicit_conversion_factor(self) -> Optional[float]:
- """
- Returns the factor installed as implicitConversionFactor for this expression
- :return: the conversion factor, if present, or None
- """
- return self.implicit_conversion_factor
-
def get_source_position(self):
"""
Returns the source position of the element.
diff --git a/pynestml/meta_model/ast_ode_equation.py b/pynestml/meta_model/ast_ode_equation.py
index 95567b367..f0679d824 100644
--- a/pynestml/meta_model/ast_ode_equation.py
+++ b/pynestml/meta_model/ast_ode_equation.py
@@ -82,8 +82,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_on_condition_block.py b/pynestml/meta_model/ast_on_condition_block.py
index 6f38044d8..5fbd7ad66 100644
--- a/pynestml/meta_model/ast_on_condition_block.py
+++ b/pynestml/meta_model/ast_on_condition_block.py
@@ -60,8 +60,7 @@ def clone(self) -> ASTOnConditionBlock:
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_on_receive_block.py b/pynestml/meta_model/ast_on_receive_block.py
index 9118eea4c..707f806f8 100644
--- a/pynestml/meta_model/ast_on_receive_block.py
+++ b/pynestml/meta_model/ast_on_receive_block.py
@@ -65,8 +65,7 @@ def clone(self) -> ASTOnReceiveBlock:
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_output_block.py b/pynestml/meta_model/ast_output_block.py
index 33cf33245..baaddbc59 100644
--- a/pynestml/meta_model/ast_output_block.py
+++ b/pynestml/meta_model/ast_output_block.py
@@ -68,8 +68,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_parameter.py b/pynestml/meta_model/ast_parameter.py
index 6e30f4af5..c3fc0c967 100644
--- a/pynestml/meta_model/ast_parameter.py
+++ b/pynestml/meta_model/ast_parameter.py
@@ -62,8 +62,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_return_stmt.py b/pynestml/meta_model/ast_return_stmt.py
index 43719747a..ec75c06db 100644
--- a/pynestml/meta_model/ast_return_stmt.py
+++ b/pynestml/meta_model/ast_return_stmt.py
@@ -64,8 +64,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_simple_expression.py b/pynestml/meta_model/ast_simple_expression.py
index 8514f76d2..9172b7fc8 100644
--- a/pynestml/meta_model/ast_simple_expression.py
+++ b/pynestml/meta_model/ast_simple_expression.py
@@ -126,8 +126,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_small_stmt.py b/pynestml/meta_model/ast_small_stmt.py
index e570084f2..eefb54ffa 100644
--- a/pynestml/meta_model/ast_small_stmt.py
+++ b/pynestml/meta_model/ast_small_stmt.py
@@ -88,8 +88,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_stmt.py b/pynestml/meta_model/ast_stmt.py
index 652ad48e8..c38b64aee 100644
--- a/pynestml/meta_model/ast_stmt.py
+++ b/pynestml/meta_model/ast_stmt.py
@@ -69,8 +69,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_stmts_body.py b/pynestml/meta_model/ast_stmts_body.py
index 52e7159d9..d3c55906d 100644
--- a/pynestml/meta_model/ast_stmts_body.py
+++ b/pynestml/meta_model/ast_stmts_body.py
@@ -66,8 +66,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_unary_operator.py b/pynestml/meta_model/ast_unary_operator.py
index 2871956cc..982e3738a 100644
--- a/pynestml/meta_model/ast_unary_operator.py
+++ b/pynestml/meta_model/ast_unary_operator.py
@@ -70,8 +70,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_unit_type.py b/pynestml/meta_model/ast_unit_type.py
index fb4d4db88..6f229e7ae 100644
--- a/pynestml/meta_model/ast_unit_type.py
+++ b/pynestml/meta_model/ast_unit_type.py
@@ -111,8 +111,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_update_block.py b/pynestml/meta_model/ast_update_block.py
index 6a7f05dc3..1d0cdcbe9 100644
--- a/pynestml/meta_model/ast_update_block.py
+++ b/pynestml/meta_model/ast_update_block.py
@@ -55,8 +55,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/meta_model/ast_variable.py b/pynestml/meta_model/ast_variable.py
index c0645be25..ceca2cf6e 100644
--- a/pynestml/meta_model/ast_variable.py
+++ b/pynestml/meta_model/ast_variable.py
@@ -82,8 +82,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
def resolve_in_own_scope(self):
from pynestml.symbols.symbol import SymbolKind
diff --git a/pynestml/meta_model/ast_while_stmt.py b/pynestml/meta_model/ast_while_stmt.py
index 1a1c51138..413fefe36 100644
--- a/pynestml/meta_model/ast_while_stmt.py
+++ b/pynestml/meta_model/ast_while_stmt.py
@@ -70,8 +70,7 @@ def clone(self):
scope=self.scope,
comment=self.comment,
pre_comments=[s for s in self.pre_comments],
- in_comment=self.in_comment,
- implicit_conversion_factor=self.implicit_conversion_factor)
+ in_comment=self.in_comment)
return dup
diff --git a/pynestml/symbols/unit_type_symbol.py b/pynestml/symbols/unit_type_symbol.py
index 1f9977de0..4c6aa4755 100644
--- a/pynestml/symbols/unit_type_symbol.py
+++ b/pynestml/symbols/unit_type_symbol.py
@@ -137,12 +137,11 @@ def add_or_sub_another_unit(self, other):
def attempt_magnitude_cast(self, other):
if self.differs_only_in_magnitude(other):
- factor = UnitTypeSymbol.get_conversion_factor(other.astropy_unit, self.astropy_unit)
- other.referenced_object.set_implicit_conversion_factor(factor)
- code, message = Messages.get_implicit_magnitude_conversion(self, other, factor)
- Logger.log_message(code=code, message=message,
- error_position=self.referenced_object.get_source_position(),
- log_level=LoggingLevel.INFO)
+ # factor = UnitTypeSymbol.get_conversion_factor(other.astropy_unit, self.astropy_unit)
+ # code, message = Messages.get_implicit_magnitude_conversion(self, other, factor)
+ # Logger.log_message(code=code, message=message,
+ # error_position=self.referenced_object.get_source_position(),
+ # log_level=LoggingLevel.INFO)
return self
diff --git a/pynestml/transformers/non_dimensionalisation_transformer.py b/pynestml/transformers/non_dimensionalisation_transformer.py
new file mode 100644
index 000000000..e7b517460
--- /dev/null
+++ b/pynestml/transformers/non_dimensionalisation_transformer.py
@@ -0,0 +1,978 @@
+# -*- coding: utf-8 -*-
+#
+# non_dimensionalisation_transformer.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import annotations
+
+from typing import Any, Dict, Sequence, Mapping, Optional, Union
+
+
+from pynestml.cocos.co_cos_manager import CoCosManager
+from pynestml.frontend.frontend_configuration import FrontendConfiguration
+from pynestml.meta_model.ast_arithmetic_operator import ASTArithmeticOperator
+from pynestml.meta_model.ast_assignment import ASTAssignment
+from pynestml.meta_model.ast_data_type import ASTDataType
+from pynestml.meta_model.ast_declaration import ASTDeclaration
+from pynestml.meta_model.ast_equations_block import ASTEquationsBlock
+from pynestml.meta_model.ast_expression import ASTExpression
+from pynestml.meta_model.ast_function_call import ASTFunctionCall
+from pynestml.meta_model.ast_inline_expression import ASTInlineExpression
+from pynestml.meta_model.ast_model import ASTModel
+from pynestml.meta_model.ast_node import ASTNode
+from pynestml.meta_model.ast_ode_equation import ASTOdeEquation
+from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression
+from pynestml.meta_model.ast_variable import ASTVariable
+from pynestml.symbols.integer_type_symbol import IntegerTypeSymbol
+from pynestml.symbols.predefined_functions import PredefinedFunctions
+from pynestml.symbols.predefined_types import PredefinedTypes
+from pynestml.symbols.predefined_units import PredefinedUnits
+from pynestml.symbols.real_type_symbol import RealTypeSymbol
+from pynestml.symbols.unit_type_symbol import UnitTypeSymbol
+from pynestml.symbols.error_type_symbol import ErrorTypeSymbol
+from pynestml.symbols.symbol import SymbolKind
+from pynestml.symbols.variable_symbol import BlockType
+from pynestml.transformers.transformer import Transformer
+from pynestml.utils.ast_utils import ASTUtils
+from pynestml.utils.model_parser import ModelParser
+from pynestml.utils.logger import Logger
+from pynestml.utils.logger import LoggingLevel
+from pynestml.utils.string_utils import removesuffix
+from pynestml.visitors.ast_parent_visitor import ASTParentVisitor
+from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor
+from pynestml.visitors.ast_higher_order_visitor import ASTHigherOrderVisitor
+from pynestml.visitors.ast_visitor import ASTVisitor
+from pynestml.utils.logger import Logger, LoggingLevel
+import astropy.units as u
+import re
+
+
+class NonDimVis(ASTVisitor):
+ r"""
+ Base class for non-dimensionalisation transformers.
+ """
+
+ def __init__(self, preferred_prefix: Dict[str, str]):
+ super().__init__()
+ self.preferred_prefix = preferred_prefix
+
+ PREFIX_FACTORS = {
+ "Y": 1e24, # yotta
+ "Z": 1e21, # zetta
+ "E": 1e18, # exa
+ "P": 1e15, # peta
+ "T": 1e12, # tera
+ "G": 1e9, # giga
+ "M": 1e6, # mega
+ "k": 1e3, # kilo
+ "h": 1e2, # hecto
+ "da": 1e1, # deca
+ "": 1.0, # no prefix
+ "1": 1.0, # no prefix
+ "d": 1e-1, # deci
+ "c": 1e-2, # centi
+ "m": 1e-3, # milli
+ "u": 1e-6, # micro (μ)
+ "n": 1e-9, # nano
+ "p": 1e-12, # pico
+ "f": 1e-15, # femto
+ "a": 1e-18, # atto
+ "z": 1e-21, # zepto
+ "y": 1e-24, # yocto
+ }
+
+ def get_conversion_factor_to_si(self, from_unit_str):
+ r"""
+ Return the conversion factor from the unit we have in the NESTML file to SI units.
+ """
+
+ from_unit = u.Unit(from_unit_str)
+ scale = from_unit.si.scale
+
+ return scale
+
+ def _is_valid_astropy_unit(self, unit_string):
+ """Check if a string can be interpreted as an astropy unit"""
+ try:
+ u.Unit(str(unit_string))
+ return True
+ except (ValueError, TypeError, u.UnitTypeError):
+ return False
+
+
+class NonDimensionalisationVarToRealTypeVisitor(NonDimVis):
+ r"""
+ Visitor changes the variable type on the LHS to "real".
+ E.g.: My_declaration V = (30 * 1.0E-03) -> My_declaration real = (30 * 1.0E-03)
+ Visitor has to be called last in the transformation process as the unit type information is used beforehand.
+ """
+
+ def __init__(self, preferred_prefix: Dict[str, str]):
+ super().__init__(preferred_prefix)
+
+ def visit_variable(self, node: ASTVariable):
+ if not isinstance(node.get_type_symbol(), UnitTypeSymbol):
+ return
+
+ if Logger.logging_level.name == "DEBUG" or "INFO":
+ print("The unit is: " + str(node.get_type_symbol().unit.unit))
+ print(
+ "The quantity is: "
+ + str(node.get_type_symbol().unit.unit.physical_type)
+ )
+
+ parent_node = node.get_parent()
+ new_node_type = RealTypeSymbol()
+ new_variable = ASTVariable(
+ name=node.name,
+ type_symbol=node.get_type_symbol(),
+ scope=node.get_scope(),
+ )
+ new_data_type = ASTDataType(
+ is_real=True, type_symbol=new_node_type, scope=node.get_scope()
+ )
+
+ if isinstance(parent_node, ASTDeclaration):
+ parent_node.variables[0] = new_variable
+ parent_node.data_type = new_data_type
+ pass
+
+ def visit_input_port(self, node):
+ if node.data_type is not None:
+ new_node_type = RealTypeSymbol()
+ new_data_type = ASTDataType(
+ is_real=True, type_symbol=new_node_type, scope=node.get_scope()
+ )
+ parent_node = node.get_parent()
+ for index, inputportexpression in enumerate(parent_node.input_definitions):
+ if inputportexpression.name == node.name:
+ new_node = node.clone()
+ new_node.data_type = new_data_type
+ parent_node.input_definitions[index] = new_node
+
+ def visit_inline_expression(self, node):
+ if isinstance(node.data_type.type_symbol, RealTypeSymbol) or isinstance(
+ node.data_type.type_symbol, UnitTypeSymbol
+ ):
+ if isinstance(node.data_type.type_symbol, RealTypeSymbol):
+ if Logger.logging_level.name == "DEBUG" or "INFO":
+ print("\tReal number, no unit\n")
+
+ elif isinstance(node.data_type.type_symbol, UnitTypeSymbol):
+ if Logger.logging_level.name == "DEBUG" or "INFO":
+ print("The unit is: " + str(node.data_type.type_symbol.unit.unit))
+ print(
+ "The quantity is: "
+ + str(node.data_type.type_symbol.unit.unit.physical_type)
+ )
+
+ parent_node = node.get_parent()
+ new_node_type = RealTypeSymbol()
+ new_data_type = ASTDataType(
+ is_real=True, type_symbol=new_node_type, scope=node.get_scope()
+ )
+
+ if isinstance(parent_node, ASTEquationsBlock):
+ for declaration in parent_node.declarations:
+ if isinstance(declaration, ASTInlineExpression):
+ if declaration.variable_name == node.variable_name:
+ declaration.data_type = new_data_type
+ pass
+
+
+class NonDimensionalisationPreferredPrefixFactorOnRhsVisitor(NonDimVis):
+ r"""
+ Visitor inserts the inverse value of the preferred prefix in scientific notation as a factor for the old encapsulated RHS expression for declarations and ODE equations.
+ E.g.: V_m V = -70 * 1.0E-03, preferred prefix of milli for 'electric potential' -> V_m V = (1.0E+03 * (-70.0 * 1.0E-03))
+ """
+
+ def __init__(self, preferred_prefix: Dict[str, str], model):
+ super().__init__(preferred_prefix)
+ self.model = model
+
+ def visit_declaration(self, node: ASTVariable) -> None:
+ # get preferred prefix that declaring variable has
+ if node.data_type.is_real:
+ return
+
+ if str(node.data_type.type_symbol.astropy_unit.physical_type) != "unknown":
+ if node.variables[0].name == "__h":
+ return
+
+ for physical_type_string in self.preferred_prefix:
+ if physical_type_string in str(node.data_type.type_symbol.astropy_unit.physical_type):
+ variable_physical_type_string = physical_type_string
+
+
+
+
+ inverse_preferred_prefix_this_node_string = f"{1 / self.PREFIX_FACTORS[self.preferred_prefix[variable_physical_type_string]]:.1E}"
+
+ # modify the node.expression to include the metric prefix as a factor in scientific notation on the lhs
+ cloned_node = node.clone()
+ lhs_expression = ASTSimpleExpression(
+ numeric_literal=float(
+ inverse_preferred_prefix_this_node_string
+ ),
+ scope=node.get_scope(),
+ )
+ rhs_expression = node.expression
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ cloned_node.expression = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope(),
+ )
+
+ for declaration in node.get_parent().declarations:
+ if declaration.variables[0].name == node.variables[0].name:
+ declaration.expression = cloned_node.expression
+
+ @staticmethod
+ def _derivate_regex(var_names: list) -> re:
+ pattern = rf"^({'|'.join(map(re.escape, var_names))})('+)?$"
+ return re.compile(pattern)
+
+ def visit_ode_equation(self, node: ASTOdeEquation):
+ # insert preferred prefix conversion factor for LHS on rhs
+ var_names = [
+ str(obj)
+ for obj in ASTUtils.all_variables_defined_in_block(
+ self.model.get_state_blocks() + self.model.get_parameters_blocks()
+ )
+ ]
+ regex = self._derivate_regex(var_names)
+ corresponding_non_diff_variable = regex.match(node.lhs.name).group()
+ if hasattr(
+ ASTUtils.get_variable_by_name(
+ self.model, corresponding_non_diff_variable
+ ).type_symbol,
+ "astropy_unit",
+ ):
+ corresponding_non_diff_variable_physical_type_string = str(
+ ASTUtils.get_variable_by_name(
+ self.model, corresponding_non_diff_variable
+ ).type_symbol.astropy_unit.physical_type
+ )
+ inverse_preferred_prefix_this_node_string = f"{1e-3 * 1 / self.PREFIX_FACTORS[self.preferred_prefix[corresponding_non_diff_variable_physical_type_string]]:.1E}"
+ cloned_node = node.clone()
+ lhs_expression = ASTSimpleExpression(
+ numeric_literal=float(inverse_preferred_prefix_this_node_string),
+ scope=node.get_scope(),
+ )
+ rhs_expression = ASTExpression(is_encapsulated=True, expression=node.rhs)
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ cloned_node.rhs = ASTExpression(
+ is_encapsulated=True, expression=new_sub_node, scope=node.get_scope()
+ )
+ for declaration in node.get_parent().declarations:
+ if isinstance(declaration, ASTOdeEquation):
+ if declaration.lhs.name == node.lhs.name:
+ declaration.rhs.type = RealTypeSymbol()
+ declaration.rhs = cloned_node.rhs
+ return
+
+ def visit_assignment(self, node):
+ # insert reciprocal of preferred prefix on RHS
+ for state_declaration in self.model.get_state_blocks()[0].declarations:
+ if (
+ state_declaration.variables[0].name
+ == node.lhs.name
+ ):
+ if not isinstance(
+ state_declaration.variables[0].type_symbol, RealTypeSymbol
+ ):
+ corresponding_physical_type_string = str(
+ state_declaration.variables[
+ 0
+ ].type_symbol.astropy_unit.physical_type
+ )
+ inverse_preferred_prefix_this_node_string = f"{1 / self.PREFIX_FACTORS[self.preferred_prefix[corresponding_physical_type_string]]:.1E}"
+ cloned_parent_node = node.get_parent()
+ lhs_expression = ASTSimpleExpression(
+ numeric_literal=float(
+ inverse_preferred_prefix_this_node_string
+ ),
+ scope=node.get_scope(),
+ )
+ rhs_expression = ASTExpression(
+ is_encapsulated=True,
+ expression=node.stmts_body.stmts[
+ 0
+ ].small_stmt.assignment.rhs,
+ )
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ cloned_node.stmts_body.stmts[
+ 0
+ ].small_stmt.assignment.rhs = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope(),
+ )
+ assignment = node.stmts_body.stmts[0].small_stmt.assignment
+ inverse_value = float(inverse_preferred_prefix_this_node_string)
+
+ new_rhs = ASTExpression(
+ is_encapsulated=True,
+ expression=ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=ASTSimpleExpression(
+ numeric_literal=inverse_value,
+ scope=node.get_scope(),
+ ),
+ rhs=ASTExpression(
+ is_encapsulated=True,
+ expression=assignment.rhs,
+ scope=node.get_scope(),
+ ),
+ scope=node.get_scope(),
+ ),
+ scope=node.get_scope(),
+ )
+ assignment.rhs = new_rhs
+
+
+ # def visit_on_receive_block(self, node):
+ # # insert reciprocal of preferred prefix on RHS
+ # if isinstance(node.stmts_body.stmts[0].small_stmt.assignment, ASTAssignment):
+ # for state_declaration in self.model.get_state_blocks()[0].declarations:
+ # if (
+ # state_declaration.variables[0].name
+ # == node.stmts_body.stmts[0].small_stmt.assignment.lhs.name
+ # ):
+ # if not isinstance(
+ # state_declaration.variables[0].type_symbol, RealTypeSymbol
+ # ):
+ # corresponding_physical_type_string = str(
+ # state_declaration.variables[
+ # 0
+ # ].type_symbol.astropy_unit.physical_type
+ # )
+ # inverse_preferred_prefix_this_node_string = f"{1 / self.PREFIX_FACTORS[self.preferred_prefix[corresponding_physical_type_string]]:.1E}"
+ # cloned_node = node.clone()
+ # lhs_expression = ASTSimpleExpression(
+ # numeric_literal=float(
+ # inverse_preferred_prefix_this_node_string
+ # ),
+ # scope=node.get_scope(),
+ # )
+ # rhs_expression = ASTExpression(
+ # is_encapsulated=True,
+ # expression=node.stmts_body.stmts[
+ # 0
+ # ].small_stmt.assignment.rhs,
+ # )
+ # new_sub_node = ASTExpression(
+ # is_encapsulated=False,
+ # binary_operator=ASTArithmeticOperator(is_times_op=True),
+ # lhs=lhs_expression,
+ # rhs=rhs_expression,
+ # scope=node.get_scope(),
+ # )
+ # cloned_node.stmts_body.stmts[
+ # 0
+ # ].small_stmt.assignment.rhs = ASTExpression(
+ # is_encapsulated=True,
+ # expression=new_sub_node,
+ # scope=node.get_scope(),
+ # )
+ # assignment = node.stmts_body.stmts[0].small_stmt.assignment
+ # inverse_value = float(inverse_preferred_prefix_this_node_string)
+
+ # new_rhs = ASTExpression(
+ # is_encapsulated=True,
+ # expression=ASTExpression(
+ # is_encapsulated=False,
+ # binary_operator=ASTArithmeticOperator(is_times_op=True),
+ # lhs=ASTSimpleExpression(
+ # numeric_literal=inverse_value,
+ # scope=node.get_scope(),
+ # ),
+ # rhs=ASTExpression(
+ # is_encapsulated=True,
+ # expression=assignment.rhs,
+ # scope=node.get_scope(),
+ # ),
+ # scope=node.get_scope(),
+ # ),
+ # scope=node.get_scope(),
+ # )
+ # assignment.rhs = new_rhs
+
+ def visit_inline_expression(self, node):
+ if not node.data_type.is_real:
+ if str(node.data_type.type_symbol.astropy_unit.physical_type) != "unknown":
+ for physical_type_string in self.preferred_prefix:
+ if physical_type_string in str(
+ node.data_type.type_symbol.astropy_unit.physical_type
+ ):
+ variable_physical_type_string = physical_type_string
+ inverse_preferred_prefix_this_node_string = f"{1 / self.PREFIX_FACTORS[self.preferred_prefix[variable_physical_type_string]]:.1E}"
+ # modify the node.expression to include the metric prefix as a factor in scientific notation on the lhs
+ cloned_node = node.clone()
+ lhs_expression = ASTSimpleExpression(
+ numeric_literal=float(inverse_preferred_prefix_this_node_string),
+ scope=node.get_scope(),
+ )
+ rhs_expression = node.expression
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ cloned_node.expression = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope(),
+ )
+
+ for declaration in node.get_parent().declarations:
+ if hasattr(declaration, "variable_name"):
+ if declaration.variable_name == node.variable_name:
+ declaration.expression = cloned_node.expression
+
+class NonDimensionalisationRHSVariableVisitor(NonDimVis):
+ r"""
+ Visitor applies the NonDimensionalisationVariableVisitor to the right-hand-side expressions of declarations, ODE equations, inline expressions and assignments.
+ E.g. each unit-typed variable occurrence such as V_m is rewritten in those expressions as (1.0E-03 * V_m) according to its preferred prefix.
+ """
+
+ def __init__(self, preferred_prefix: Dict[str, str], model):
+ super().__init__(preferred_prefix)
+ self.model = model
+
+ def visit_declaration(self, node: ASTDeclaration) -> None:
+ print("Visit decl " + str(node))
+ node.get_expression().accept(NonDimensionalisationVariableVisitor(self.preferred_prefix))
+
+ def visit_ode_equation(self, node: ASTOdeEquation):
+ node.get_rhs().accept(NonDimensionalisationVariableVisitor(self.preferred_prefix))
+
+ def visit_inline_expression(self, node: ASTInlineExpression):
+ node.get_expression().accept(NonDimensionalisationVariableVisitor(self.preferred_prefix))
+
+ def visit_assignment(self, node: ASTAssignment):
+ node.get_expression().accept(NonDimensionalisationVariableVisitor(self.preferred_prefix))
+
+
+
+class NonDimensionalisationNumericLiteralVisitor(NonDimVis):
+ r"""
+ Visitor changes unit symbols and numeric prefixes to numerical factors in expressions on RHSs, where the numerical prefix and unit are positioned after an expression
+ E.g.: Var_a V = .... + (4 + 3) * mV -> Var_a V = .... + ((4 + 3) * 1.0E-03)
+ """
+
+ def __init__(self, preferred_prefix: Dict[str, str]):
+ super().__init__(preferred_prefix)
+
+ def visit_variable(self, node: ASTVariable) -> None:
+ if not hasattr(node.get_parent(), "variable"):
+ return
+
+ symbol_is_defined_as_a_variable: bool = node.get_scope().resolve_to_symbol(node.get_complete_name(), SymbolKind.VARIABLE)
+ if symbol_is_defined_as_a_variable or (not super()._is_valid_astropy_unit(node.name)):
+ return
+
+ if (node.get_parent().variable.name != node.get_name()
+ or node.get_parent().numeric_literal is None
+ ):
+ return
+
+ # The variable encountered is something like mV, without a numeric literal in front, e.g. (4 + 3) * mV
+ assert PredefinedUnits.is_unit(node.get_name())
+ conversion_factor = float(f"{super().get_conversion_factor_to_si(node.get_name()):.1E}")
+
+ if conversion_factor == 1:
+ # skip conversion with factor 1
+ return
+
+ parent_node = node.get_parent()
+ assert isinstance(parent_node, ASTSimpleExpression), "Don't know how to handle ASTVariable that is not inside an ASTSimpleExpression"
+
+ parent_node.set_numeric_literal(conversion_factor * parent_node.get_numeric_literal())
+ parent_node.set_variable(None)
+
+
+class NonDimensionalisationVariableVisitor(NonDimVis):
+ r"""
+ Visitor changes variables from their preferred prefix into an SI prefix, e.g. "V_m" --> "1E-3 * V_m"
+ """
+
+ def __init__(self, preferred_prefix: Dict[str, str]):
+ super().__init__(preferred_prefix)
+
+ def visit_variable(self, node: ASTVariable) -> None:
+ print("NonDimensionalisationVariableVisitor: visit_variable(" + str(node))
+ if node.get_type_symbol():
+ type_sym = node.get_type_symbol()
+ else:
+ var_sym = node.get_scope().resolve_to_symbol(node.get_complete_name(), SymbolKind.VARIABLE)
+ type_sym = var_sym.type_symbol
+
+ var_unit = type_sym.unit.unit
+ var_quantity = var_unit.physical_type
+ var_preferred_prefix = self.preferred_prefix[str(var_quantity)]
+ conversion_factor_to_si = float(self.PREFIX_FACTORS[var_preferred_prefix])
+
+ # multiply the variable by conversion_factor_to_si
+ parent_node = node.get_parent()
+
+ assert isinstance(parent_node, ASTSimpleExpression), "Don't know how to handle ASTVariable that is not inside an ASTSimpleExpression"
+
+
+
+ lhs_expression = ASTSimpleExpression(
+ numeric_literal=conversion_factor_to_si,
+ scope=node.get_scope(),
+ )
+ rhs_expression = ASTSimpleExpression(
+ variable=node,
+ scope=node.get_scope(),
+ )
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ new_node = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope()
+ )
+
+ grandparent_node = parent_node.get_parent()
+ if isinstance(grandparent_node, ASTDeclaration):
+ # something of the form: ``V_m mV = E_L``
+ # change it into ``V_m mV = conversion_factor_to_si * E_L``
+
+ grandparent_node.set_expression(new_sub_node)
+ elif isinstance(grandparent_node, ASTExpression) and grandparent_node.get_binary_operator() is not None and grandparent_node.get_binary_operator().is_times_op:
+ # of the form "1000 * V_m" or "V_m * foo"
+
+ if grandparent_node.rhs == parent_node:
+ grandparent_node.rhs = new_node
+ else:
+ assert grandparent_node.lhs == parent_node
+ grandparent_node.lhs = new_node
+ else:
+ raise Exception("Don't know how to handle grandparent node type: " + str(type(grandparent_node)))
+
+
+class NonDimensionalisationSimpleExpressionVisitor(NonDimVis):
+ r"""
+ Visitor converts unit-ful simple expressions with metric prefixes to real type expressions in the corresponding SI base unit in RHSs.
+ E.g.:
+
+ .. code:: NESTML
+
+ Var_a V = ...... * 3 MV
+
+ becomes
+
+ .. code:: NESTML
+
+ Var_a V = ...... * (3 * 1.0E+06)
+ """
+
+ def __init__(self, preferred_prefix: Dict[str, str], model):
+ super().__init__(preferred_prefix)
+ self.model = model
+
+ def _handle_node(self, node):
+ if hasattr(node, "variable"):
+ if str(node.variable) == "spikes":
+ return # spikes have 1/s in NESTML
+
+ if node.get_numeric_literal() is not None:
+ print("Numeric literal: " + str(node.get_numeric_literal()))
+ if isinstance(node.type, RealTypeSymbol):
+ print("\tReal number, no unit\n")
+ return
+ elif isinstance(node.type, UnitTypeSymbol):
+ # the expression 3 MV is a SimpleExpression for example
+ parent_node = node.get_parent()
+ print("\tUnit: " + str(node.type.unit.unit))
+ conversion_factor = (
+ f"{super().get_conversion_factor_to_si(node.variable.name):.1E}"
+ )
+ numeric_literal = node.get_numeric_literal()
+ lhs_expression = ASTSimpleExpression(
+ numeric_literal=float(numeric_literal), scope=node.get_scope()
+ )
+ rhs_expression = ASTSimpleExpression(
+ numeric_literal=float(str(conversion_factor)),
+ scope=node.get_scope(),
+ )
+ if isinstance(parent_node, ASTExpression):
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ new_node = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope(),
+ unary_operator=parent_node.unary_operator,
+ )
+ if parent_node.binary_operator is not None:
+ parent_node.binary_operator = parent_node.binary_operator
+ if parent_node.rhs == node:
+ parent_node.rhs = new_node
+ elif parent_node.lhs == node:
+ parent_node.lhs = new_node
+ else:
+ raise Exception(
+ "Node is neither lhs nor rhs of parent, possibly expression - should not execute until here."
+ )
+ elif parent_node.binary_operator is None:
+ parent_node.rhs = None
+ parent_node.expression = new_node
+ parent_node.unary_operator = None
+ else:
+ raise Exception("This case is also possible and needs handling")
+ if isinstance(parent_node, ASTDeclaration):
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(is_times_op=True),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ new_node = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope(),
+ )
+ parent_node.expression = new_node
+
+ elif isinstance(node.type, IntegerTypeSymbol):
+ print("\tInteger type number, no unit\n")
+ else:
+ raise Exception(
+ "Node type is neither RealTypeSymbol nor UnitTypeSymbol"
+ )
+ return
+
+ if node.function_call is None:
+ if isinstance(node.get_parent(), ASTFunctionCall) and node.get_parent().get_name() in [PredefinedFunctions.INTEGRATE_ODES, PredefinedFunctions.CONVOLVE, PredefinedFunctions.DELTA, PredefinedFunctions.TIME_STEPS, PredefinedFunctions.EMIT_SPIKE]:
+ # skip certain predefined functions, e.g. don't add factors to "integrate_odes(V_m)" call
+ return
+
+ if node.get_numeric_literal() is None:
+ # get physical type of node
+ if isinstance(node.type, UnitTypeSymbol):
+ if "spikes" not in node.variable.name:
+ symbol_is_defined_as_a_variable: bool = node.variable.get_scope().resolve_to_symbol(node.variable.get_complete_name(), SymbolKind.VARIABLE) is not None
+ if ((not symbol_is_defined_as_a_variable)
+ and super()._is_valid_astropy_unit(node.variable.name)
+ and (node.get_parent().binary_operator is not None
+ or node.get_parent().unary_operator is not None)):
+ # This should be handled by visit_variable instead - return early
+ return
+ else:
+ if str(node.type.astropy_unit.physical_type) != "unknown":
+ variable_physical_type_string = "error"
+ for physical_type_string in self.preferred_prefix:
+ if physical_type_string in str(
+ node.type.astropy_unit.physical_type
+ ):
+ variable_physical_type_string = (
+ physical_type_string
+ )
+ # get preferred prefix for this node
+ if variable_physical_type_string == "error":
+ raise Exception("AstroPy Physical Type \"" + str(node.type.astropy_unit.physical_type) + "\" could not be determined. Is it really defined in preferred prefixes?")
+ preferred_prefix_this_node_string = f"{self.PREFIX_FACTORS[self.preferred_prefix[variable_physical_type_string]]:.1E}"
+ # create a new sub node that multiplies the variable with the reciprocal of the preferred prefix
+ lhs_expression = node.clone()
+ rhs_expression = ASTSimpleExpression(
+ numeric_literal=float(
+ preferred_prefix_this_node_string
+ ),
+ scope=node.get_scope(),
+ )
+ new_sub_node = ASTExpression(
+ is_encapsulated=False,
+ binary_operator=ASTArithmeticOperator(
+ is_times_op=True
+ ),
+ lhs=lhs_expression,
+ rhs=rhs_expression,
+ scope=node.get_scope(),
+ )
+ parent_node = node.get_parent()
+ if hasattr(parent_node, "unary_operator"):
+ # create new node encapsulating multiplication
+ new_node = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope(),
+ unary_operator=parent_node.unary_operator,
+ )
+ # attach new node to parent node
+ grandparent_node = parent_node.get_parent()
+ if any(
+ hasattr(parent_node, attr)
+ for attr in ["lhs", "rhs"]
+ ):
+ if node == parent_node.lhs:
+ if parent_node.binary_operator is not None:
+ parent_node.binary_operator = (
+ parent_node.binary_operator
+ )
+ parent_node.lhs = new_node
+ parent_node.rhs = parent_node.rhs
+ return
+ elif parent_node.binary_operator is None:
+ parent_node.rhs = None
+ parent_node.expression = new_node
+ parent_node.unary_operator = None
+ return
+ if node == parent_node.rhs:
+ if parent_node.binary_operator is not None:
+ parent_node.binary_operator = (
+ parent_node.binary_operator
+ )
+ parent_node.rhs = new_node
+ parent_node.lhs = parent_node.lhs
+ return
+ elif parent_node.binary_operator is None:
+ parent_node.rhs = None
+ parent_node.expression = new_node
+ parent_node.unary_operator = None
+ return
+ if parent_node == grandparent_node.lhs:
+ grandparent_node.lhs = new_node
+ return
+ if parent_node == parent_node.rhs:
+ grandparent_node.rhs = new_node
+ return
+ elif parent_node == parent_node.expression:
+ parent_node.expression = new_node
+ return
+ else:
+ raise Exception(
+ "Parent node has no attribute lhs, rhs or expression."
+ )
+ elif not (hasattr(parent_node, "unary_operator")):
+ # create new node encapsulating multiplication
+ new_node = ASTExpression(
+ is_encapsulated=True,
+ expression=new_sub_node,
+ scope=node.get_scope(),
+ )
+ # attach new node to parent node
+ if any(
+ hasattr(parent_node, attr)
+ for attr in ["lhs", "rhs"]
+ ):
+ if node == parent_node.lhs:
+ if parent_node.binary_operator is not None:
+ parent_node.binary_operator = (
+ parent_node.binary_operator
+ )
+ parent_node.lhs = new_node
+ parent_node.rhs = parent_node.rhs
+ return
+ elif parent_node.binary_operator is None:
+ parent_node.rhs = None
+ parent_node.expression = new_node
+ parent_node.unary_operator = None
+ return
+ if node == parent_node.rhs:
+ if not hasattr(node, "binary_operator"):
+ parent_node.expression = new_node
+ return
+ elif (
+ parent_node.binary_operator is not None
+ ):
+ parent_node.binary_operator = (
+ parent_node.binary_operator
+ )
+ parent_node.rhs = new_node
+ parent_node.lhs = parent_node.lhs
+ return
+ elif parent_node.binary_operator is None:
+ parent_node.rhs = None
+ parent_node.expression = new_node
+ parent_node.unary_operator = None
+ return
+ elif hasattr(parent_node, "expression"):
+ parent_node.args = new_node
+ return
+ elif hasattr(parent_node, "args"):
+ for i, arg in enumerate(parent_node.args):
+ if node == arg:
+ parent_node.args[i] = new_node
+ return
+ raise Exception("arg not found in parent node arguments list")
+ else:
+ raise Exception("Parent node has no rhs or lhs.")
+
+ def visit_simple_expression(self, node):
+ # if node.function_call is not None:
+ # print("Function call")
+ # for arg in node.function_call.get_args():
+ # self.visit(arg)
+
+ # import pdb;pdb.set_trace()
+
+
+ # else:
+ self._handle_node(node)
+
+ super().visit_simple_expression(node)
+
+
class NonDimensionalisationTransformer(Transformer):
    r"""Remove all units from the model and replace them with real type.

    The transformer relates to [PR-1217](https://github.com/nest/nestml/pull/1217) and [Issue-984](https://github.com/nest/nestml/issues/984).

    The correct transformation of the expressions inside a NESTML file should be checked. The tests should include:
    - checking for all metric prefixes
    - checking for nested expressions with metric prefixes
    - checking that transformations occur in every part of the NESTML file where units are specified
    - checking of transformation for derived variables
    - checking for transformation of reciprocal units / expressions with reciprocal units
        - does it make sense for these to have the same desired unit?
        - E.g.: desired unit of 'electrical potential' is mV -> should variables with physical type of '1/V' also be then expressed as '1/mV' post transformation?
        - see *test_reciprocal_unit_in_paramterblock*
    - checking additional parentheses are set correctly

    In a second instance the unit arithmetic and consistency of physical types needs to be checked pre-transformation after the original AST is built:
    - will the expression on the RHS of an equation yield a unit that is a unit of what is specified on the LHS of the equation?
    - How should exceptions be handled, for example if LHS is 'V' but result on RHS is '1/V'?
    - Are the arguments inside of functions like exp(), log(), sin(), etc. dimensionless or has the user made a mistake?
    - What should happen if unknown units are encountered?

    These tests can be found in ``tests/nest_tests/non_dimensionalisation_transformer``.
    """

    _default_options = {
        "quantity_to_preferred_prefix": {},
        "variable_to_preferred_prefix": {},
    }

    def __init__(self, options: Optional[Mapping[str, Any]] = None):
        # Bug fix: the original called ``super(Transformer, self).__init__(options)``,
        # which skips ``Transformer.__init__`` and delegates straight to its
        # *parent* class, so ``Transformer``'s own initialisation (e.g. option
        # handling) was never executed.
        super().__init__(options)

    def _debug_print(self, model: ASTNode, stage: str) -> None:
        """Print *model* to stdout when log level is DEBUG; *stage* labels the output."""
        if Logger.logging_level.name == "DEBUG":
            print(f"NonDimensionalisationTransformer(): model after {stage}:")
            print(model)

    def transform_(
        self, model: Union[ASTNode, Sequence[ASTNode]]
    ) -> Union[ASTNode, Sequence[ASTNode]]:
        """Run the non-dimensionalisation visitor pipeline on a single model.

        The input model is not modified; a transformed clone is returned with
        parent pointers and the symbol table rebuilt afterwards.
        """
        transformed_model = model.clone()
        transformed_model.accept(ASTParentVisitor())
        transformed_model.accept(ASTSymbolTableVisitor())

        preferred_prefixes = self.get_option("quantity_to_preferred_prefix")
        literal_visitor = NonDimensionalisationNumericLiteralVisitor(preferred_prefixes)
        rhs_preferred_prefix_visitor = NonDimensionalisationPreferredPrefixFactorOnRhsVisitor(
            preferred_prefixes, model
        )
        var_to_real_type_visitor = NonDimensionalisationVarToRealTypeVisitor(preferred_prefixes)
        rhs_variable_visitor = NonDimensionalisationRHSVariableVisitor(preferred_prefixes, model)
        # NOTE(review): a NonDimensionalisationSimpleExpressionVisitor used to be
        # constructed here, but its ``accept`` call was commented out; the unused
        # construction was removed as dead code -- TODO confirm it is not needed.

        transformed_model.accept(ASTParentVisitor())
        transformed_model.accept(literal_visitor)
        self._debug_print(transformed_model, "literal visitor")

        transformed_model.accept(rhs_variable_visitor)
        self._debug_print(transformed_model, "variable visitor")

        transformed_model.accept(rhs_preferred_prefix_visitor)
        self._debug_print(transformed_model, "rhs_preferred_prefix_visitor visitor")

        transformed_model.accept(var_to_real_type_visitor)
        self._debug_print(transformed_model, "var_to_real_type_visitor visitor")

        # rebuild parent pointers and the symbol table after the AST surgery
        transformed_model.accept(ASTParentVisitor())
        transformed_model.accept(ASTSymbolTableVisitor())

        return transformed_model

    def transform(
        self, models: Union[ASTNode, Sequence[ASTNode]]
    ) -> Union[ASTNode, Sequence[ASTNode]]:
        """Transform one model or a sequence of models.

        A single ``ASTNode`` in yields a single transformed ``ASTNode`` out;
        a sequence yields a list of transformed models.
        """
        single = isinstance(models, ASTNode)
        if single:
            # Bug fix: the original assigned the wrapped list to ``model``
            # (singular) and then iterated the untouched ``models`` ASTNode,
            # so passing a single model would misbehave.
            models = [models]

        transformed_models = [self.transform_(model) for model in models]

        return transformed_models[0] if single else transformed_models
diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py
index ef51e0812..e3cd1d1a3 100644
--- a/pynestml/utils/ast_utils.py
+++ b/pynestml/utils/ast_utils.py
@@ -26,6 +26,8 @@
import odetoolbox
+from astropy import units as u
+
from pynestml.codegeneration.printers.ast_printer import ASTPrinter
from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter
from pynestml.frontend.frontend_configuration import FrontendConfiguration
@@ -1579,6 +1581,24 @@ def get_parameter_variable_by_name(cls, node: ASTModel, var_name: str) -> ASTVar
return var
return None
+ @staticmethod
+ def _to_base_value_from_string(quantity_str):
+ local_dict = {'u': u}
+ quantity = eval(quantity_str, {"__builtins__": {}}, local_dict)
+ canonical_unit = u.get_physical_type(quantity.unit)._unit
+ # Return the SI base value and unit name
+ return quantity.si.value, str(canonical_unit)
+
+ @classmethod
+ def generate_updated_state_dict(cls, initial_values: dict, parameter_value_dict: dict) -> dict:
+ updated_state_dict = {}
+ for key, value in initial_values.items():
+ if value in parameter_value_dict:
+ updated_state_dict[key] = float(parameter_value_dict[value])
+ else:
+ updated_state_dict[key] = float(value)
+ return updated_state_dict
+
@classmethod
def get_internal_decl_by_name(cls, node: ASTModel, var_name: str) -> ASTDeclaration:
"""
@@ -2266,6 +2286,18 @@ def remove_kernel_definitions_from_equations_block(cls, model: ASTModel) -> ASTD
return decl_to_remove
+ @classmethod
+ def add_timestep_symbol(cls, model: ASTModel) -> None:
+ """
+ Add timestep variable to the internals block
+ """
+ from pynestml.utils.model_parser import ModelParser
+ assert model.get_initial_value(
+ "__h") is None, "\"__h\" is a reserved name, please do not use variables by this name in your NESTML file"
+ assert not "__h" in [sym.name for sym in model.get_internal_symbols(
+ )], "\"__h\" is a reserved name, please do not use variables by this name in your NESTML file"
+ model.add_to_internals_block(ModelParser.parse_declaration('__h ms = resolution()'), index=0)
+
@classmethod
def generate_kernel_buffers(cls, model: ASTModel, equations_block: Union[ASTEquationsBlock, List[ASTEquationsBlock]]) -> Mapping[ASTKernel, ASTInputPort]:
"""
diff --git a/pynestml/utils/cloning_helpers.py b/pynestml/utils/cloning_helpers.py
index 5b9976c93..d1ca3b929 100644
--- a/pynestml/utils/cloning_helpers.py
+++ b/pynestml/utils/cloning_helpers.py
@@ -30,7 +30,7 @@ def clone_numeric_literal(numeric_literal):
# Python basic type
return numeric_literal
- if type(numeric_literal) in [np.int, np.int8, np.int16, np.int32, np.int64]:
+ if type(numeric_literal) in [np.int8, np.int16, np.int32, np.int64, np.float64]:
# NumPy types
return numeric_literal.copy()
diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py
index 1930d91e0..233fc83b0 100644
--- a/pynestml/utils/messages.py
+++ b/pynestml/utils/messages.py
@@ -216,12 +216,6 @@ def get_convolve_needs_buffer_parameter(cls):
message = 'Convolve requires a buffer variable as second parameter!'
return MessageCode.CONVOLVE_NEEDS_BUFFER_PARAMETER, message
- @classmethod
- def get_implicit_magnitude_conversion(cls, lhs, rhs, conversion_factor):
- message = 'Implicit magnitude conversion from %s to %s with factor %s ' % (
- lhs.print_symbol(), rhs.print_symbol(), conversion_factor)
- return MessageCode.IMPLICIT_CAST, message
-
@classmethod
def get_function_call_implicit_cast(
cls,
@@ -269,6 +263,7 @@ def get_type_could_not_be_derived(cls, rhs):
message = 'Type of \'%s\' could not be derived!' % rhs
return MessageCode.TYPE_NOT_DERIVABLE, message
+ # XXX: this should be removed
@classmethod
def get_implicit_cast_rhs_to_lhs(cls, rhs_type, lhs_type):
"""
diff --git a/pynestml/utils/model_parser.py b/pynestml/utils/model_parser.py
index f05cdf0e1..497c5acc3 100644
--- a/pynestml/utils/model_parser.py
+++ b/pynestml/utils/model_parser.py
@@ -71,7 +71,6 @@
from pynestml.utils.error_listener import NestMLErrorListener
from pynestml.utils.logger import Logger, LoggingLevel
from pynestml.utils.messages import Messages
-from pynestml.visitors.assign_implicit_conversion_factors_visitor import AssignImplicitConversionFactorsVisitor
from pynestml.visitors.ast_builder_visitor import ASTBuilderVisitor
from pynestml.visitors.ast_higher_order_visitor import ASTHigherOrderVisitor
from pynestml.visitors.ast_parent_visitor import ASTParentVisitor
@@ -153,7 +152,6 @@ def parse_file(cls, file_path=None):
model.accept(ASTSymbolTableVisitor())
SymbolTable.add_model_scope(model.get_name(), model.get_scope())
Logger.set_current_node(model)
- model.accept(AssignImplicitConversionFactorsVisitor())
Logger.set_current_node(None)
# store source paths
diff --git a/pynestml/utils/type_caster.py b/pynestml/utils/type_caster.py
deleted file mode 100644
index 2f7827bad..000000000
--- a/pynestml/utils/type_caster.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# type_caster.py
-#
-# This file is part of NEST.
-#
-# Copyright (C) 2004 The NEST Initiative
-#
-# NEST is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 2 of the License, or
-# (at your option) any later version.
-#
-# NEST is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with NEST. If not, see .
-
-from pynestml.symbols.unit_type_symbol import UnitTypeSymbol
-from pynestml.utils.logger import Logger, LoggingLevel
-from pynestml.utils.messages import Messages
-
-
-class TypeCaster:
- @staticmethod
- def do_magnitude_conversion_rhs_to_lhs(_rhs_type_symbol, _lhs_type_symbol, _containing_expression):
- """
- Determine conversion factor from rhs to lhs, register it with the relevant expression
- """
- _containing_expression.set_implicit_conversion_factor(
- UnitTypeSymbol.get_conversion_factor(_rhs_type_symbol.astropy_unit,
- _lhs_type_symbol.astropy_unit))
- code, message = Messages.get_implicit_magnitude_conversion(_lhs_type_symbol, _rhs_type_symbol,
- _containing_expression.get_implicit_conversion_factor())
- Logger.log_message(code=code, message=message,
- error_position=_containing_expression.get_source_position(),
- log_level=LoggingLevel.INFO)
-
- @staticmethod
- def try_to_recover_or_error(_lhs_type_symbol, _rhs_type_symbol, _containing_expression, set_implicit_conversion_factor_on_lhs=False):
- if _rhs_type_symbol.is_castable_to(_lhs_type_symbol):
- if isinstance(_lhs_type_symbol, UnitTypeSymbol) \
- and isinstance(_rhs_type_symbol, UnitTypeSymbol):
- conversion_factor = UnitTypeSymbol.get_conversion_factor(_rhs_type_symbol.astropy_unit, _lhs_type_symbol.astropy_unit)
-
- if conversion_factor is None:
- # error during conversion
- code, message = Messages.get_type_different_from_expected(_lhs_type_symbol, _rhs_type_symbol)
- Logger.log_message(error_position=_containing_expression.get_source_position(),
- code=code, message=message, log_level=LoggingLevel.ERROR)
- return
-
- if set_implicit_conversion_factor_on_lhs and not conversion_factor == 1.:
- # the units are mutually convertible, but require a factor unequal to 1 (e.g. mV and A*Ohm)
- TypeCaster.do_magnitude_conversion_rhs_to_lhs(_rhs_type_symbol, _lhs_type_symbol, _containing_expression)
-
- # the units are mutually convertible (e.g. V and A*Ohm)
- code, message = Messages.get_implicit_cast_rhs_to_lhs(_rhs_type_symbol.print_symbol(),
- _lhs_type_symbol.print_symbol())
- Logger.log_message(error_position=_containing_expression.get_source_position(),
- code=code, message=message, log_level=LoggingLevel.INFO)
- return
-
- code, message = Messages.get_type_different_from_expected(_lhs_type_symbol, _rhs_type_symbol)
- Logger.log_message(error_position=_containing_expression.get_source_position(),
- code=code, message=message, log_level=LoggingLevel.ERROR)
diff --git a/pynestml/visitors/__init__.py b/pynestml/visitors/__init__.py
index 541cabdd1..f50261123 100644
--- a/pynestml/visitors/__init__.py
+++ b/pynestml/visitors/__init__.py
@@ -35,6 +35,7 @@
'ast_line_operation_visitor.py',
'ast_logical_not_visitor.py',
'ast_numeric_literal_visitor.py',
+ 'ast_parent_visitor.py',
'ast_parent_aware_visitor.py',
'ast_parentheses_visitor.py',
'ast_power_visitor.py',
diff --git a/pynestml/visitors/assign_implicit_conversion_factors_visitor.py b/pynestml/visitors/assign_implicit_conversion_factors_visitor.py
deleted file mode 100644
index bad89d52c..000000000
--- a/pynestml/visitors/assign_implicit_conversion_factors_visitor.py
+++ /dev/null
@@ -1,326 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# assign_implicit_conversion_factors_visitor.py
-#
-# This file is part of NEST.
-#
-# Copyright (C) 2004 The NEST Initiative
-#
-# NEST is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 2 of the License, or
-# (at your option) any later version.
-#
-# NEST is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with NEST. If not, see .
-
-from typing import Sequence, Union
-
-from pynestml.meta_model.ast_compound_stmt import ASTCompoundStmt
-from pynestml.meta_model.ast_declaration import ASTDeclaration
-from pynestml.meta_model.ast_inline_expression import ASTInlineExpression
-from pynestml.meta_model.ast_model import ASTModel
-from pynestml.meta_model.ast_node import ASTNode
-from pynestml.meta_model.ast_small_stmt import ASTSmallStmt
-from pynestml.meta_model.ast_stmt import ASTStmt
-from pynestml.symbols.error_type_symbol import ErrorTypeSymbol
-from pynestml.symbols.predefined_types import PredefinedTypes
-from pynestml.symbols.symbol import SymbolKind
-from pynestml.symbols.template_type_symbol import TemplateTypeSymbol
-from pynestml.symbols.variadic_type_symbol import VariadicTypeSymbol
-from pynestml.utils.ast_source_location import ASTSourceLocation
-from pynestml.utils.ast_utils import ASTUtils
-from pynestml.utils.logger import LoggingLevel, Logger
-from pynestml.utils.logging_helper import LoggingHelper
-from pynestml.utils.messages import Messages
-from pynestml.utils.type_caster import TypeCaster
-from pynestml.visitors.ast_visitor import ASTVisitor
-
-
-class AssignImplicitConversionFactorsVisitor(ASTVisitor):
- r"""
- Assign implicit conversion factors in expressions.
- """
-
- def visit_model(self, model: ASTModel):
- self.__assign_return_types(model)
-
- def visit_declaration(self, node):
- """
- Visits a single declaration and asserts that type of lhs is equal to type of rhs.
- :param node: a single declaration.
- :type node: ASTDeclaration
- """
- assert isinstance(node, ASTDeclaration)
- if node.has_expression():
- if node.get_expression().get_source_position().equals(ASTSourceLocation.get_added_source_position()):
- # no type checks are executed for added nodes, since we assume correctness
- return
- lhs_type = node.get_data_type().get_type_symbol()
- rhs_type = node.get_expression().type
- if isinstance(rhs_type, ErrorTypeSymbol):
- LoggingHelper.drop_missing_type_error(node)
- return
- if self.__types_do_not_match(lhs_type, rhs_type):
- TypeCaster.try_to_recover_or_error(lhs_type, rhs_type, node.get_expression(),
- set_implicit_conversion_factor_on_lhs=True)
-
- def visit_inline_expression(self, node):
- """
- Visits a single inline expression and asserts that type of lhs is equal to type of rhs.
- """
- assert isinstance(node, ASTInlineExpression)
- lhs_type = node.get_data_type().get_type_symbol()
- rhs_type = node.get_expression().type
- if isinstance(rhs_type, ErrorTypeSymbol):
- LoggingHelper.drop_missing_type_error(node)
- return
-
- if self.__types_do_not_match(lhs_type, rhs_type):
- TypeCaster.try_to_recover_or_error(lhs_type, rhs_type, node.get_expression(),
- set_implicit_conversion_factor_on_lhs=True)
-
- def visit_assignment(self, node):
- """
- Visits a single expression and assures that type(lhs) == type(rhs).
- :param node: a single assignment.
- :type node: ASTAssignment
- """
- from pynestml.meta_model.ast_assignment import ASTAssignment
- assert isinstance(node, ASTAssignment)
-
- if node.get_source_position().equals(ASTSourceLocation.get_added_source_position()):
- # no type checks are executed for added nodes, since we assume correctness
- return
- if node.is_direct_assignment: # case a = b is simple
- self.handle_simple_assignment(node)
- else:
- self.handle_compound_assignment(node) # e.g. a *= b
-
- def handle_compound_assignment(self, node):
- rhs_expr = node.get_expression()
- lhs_variable_symbol = node.get_variable().resolve_in_own_scope()
- rhs_type_symbol = rhs_expr.type
-
- if lhs_variable_symbol is None:
- code, message = Messages.get_equation_var_not_in_state_block(node.get_variable().get_complete_name())
- Logger.log_message(code=code, message=message, error_position=node.get_source_position(),
- log_level=LoggingLevel.ERROR)
- return
-
- if isinstance(rhs_type_symbol, ErrorTypeSymbol):
- LoggingHelper.drop_missing_type_error(node)
- return
-
- lhs_type_symbol = lhs_variable_symbol.get_type_symbol()
-
- if node.is_compound_product:
- if self.__types_do_not_match(lhs_type_symbol, lhs_type_symbol * rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_type_symbol, lhs_type_symbol * rhs_type_symbol,
- node.get_expression(),
- set_implicit_conversion_factor_on_lhs=True)
- return
- return
-
- if node.is_compound_quotient:
- if self.__types_do_not_match(lhs_type_symbol, lhs_type_symbol / rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_type_symbol, lhs_type_symbol / rhs_type_symbol,
- node.get_expression(),
- set_implicit_conversion_factor_on_lhs=True)
- return
- return
-
- assert node.is_compound_sum or node.is_compound_minus
- if self.__types_do_not_match(lhs_type_symbol, rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_type_symbol, rhs_type_symbol,
- node.get_expression(),
- set_implicit_conversion_factor_on_lhs=True)
-
- @staticmethod
- def __types_do_not_match(lhs_type_symbol, rhs_type_symbol):
- if lhs_type_symbol is None:
- return True
-
- return not lhs_type_symbol.equals(rhs_type_symbol)
-
- def handle_simple_assignment(self, node):
- from pynestml.symbols.symbol import SymbolKind
- lhs_variable_symbol = node.get_scope().resolve_to_symbol(node.get_variable().get_complete_name(),
- SymbolKind.VARIABLE)
-
- rhs_type_symbol = node.get_expression().type
- if isinstance(rhs_type_symbol, ErrorTypeSymbol):
- LoggingHelper.drop_missing_type_error(node)
- return
-
- if lhs_variable_symbol is not None and self.__types_do_not_match(lhs_variable_symbol.get_type_symbol(),
- rhs_type_symbol):
- TypeCaster.try_to_recover_or_error(lhs_variable_symbol.get_type_symbol(), rhs_type_symbol,
- node.get_expression(),
- set_implicit_conversion_factor_on_lhs=True)
-
- def visit_function_call(self, node):
- """
- Check consistency for a single function call: check if the called function has been declared, whether the number and types of arguments correspond to the declaration, etc.
-
- :param node: a single function call.
- :type node: ASTFunctionCall
- """
- func_name = node.get_name()
-
- if func_name == 'convolve':
- return
-
- symbol = node.get_scope().resolve_to_symbol(node.get_name(), SymbolKind.FUNCTION)
-
- if symbol is None and ASTUtils.is_function_delay_variable(node):
- return
-
- # first check if the function has been declared
- if symbol is None:
- code, message = Messages.get_function_not_declared(node.get_name())
- Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR,
- code=code, message=message)
- return
-
- # check if the number of arguments is the same as in the symbol; accept anything for variadic types
- is_variadic: bool = len(symbol.get_parameter_types()) == 1 and isinstance(symbol.get_parameter_types()[0], VariadicTypeSymbol)
- if (not is_variadic) and len(node.get_args()) != len(symbol.get_parameter_types()):
- code, message = Messages.get_wrong_number_of_args(str(node), len(symbol.get_parameter_types()),
- len(node.get_args()))
- Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR,
- error_position=node.get_source_position())
- return
-
- # finally check if the call is correctly typed
- expected_types = symbol.get_parameter_types()
- actual_args = node.get_args()
- actual_types = [arg.type for arg in actual_args]
- for actual_arg, actual_type, expected_type in zip(actual_args, actual_types, expected_types):
- if isinstance(actual_type, ErrorTypeSymbol):
- code, message = Messages.get_type_could_not_be_derived(actual_arg)
- Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR,
- error_position=actual_arg.get_source_position())
- return
-
- if isinstance(expected_type, VariadicTypeSymbol):
- # variadic type symbol accepts anything
- return
-
- if not actual_type.equals(expected_type) and not isinstance(expected_type, TemplateTypeSymbol):
- TypeCaster.try_to_recover_or_error(expected_type, actual_type, actual_arg,
- set_implicit_conversion_factor_on_lhs=True)
-
- def __assign_return_types(self, _node):
- for userDefinedFunction in _node.get_functions():
- symbol = userDefinedFunction.get_scope().resolve_to_symbol(userDefinedFunction.get_name(),
- SymbolKind.FUNCTION)
- # first ensure that the block contains at least one statement
- if symbol is not None and len(userDefinedFunction.get_stmts_body().get_stmts()) > 0:
- # now check that the last statement is a return
- self.__check_return_recursively(userDefinedFunction,
- symbol.get_return_type(),
- userDefinedFunction.get_stmts_body().get_stmts(),
- False)
- # now if it does not have a statement, but uses a return type, it is an error
- elif symbol is not None and userDefinedFunction.has_return_type() and \
- not symbol.get_return_type().equals(PredefinedTypes.get_void_type()):
- code, message = Messages.get_no_return()
- Logger.log_message(node=_node, code=code, message=message,
- error_position=userDefinedFunction.get_source_position(),
- log_level=LoggingLevel.ERROR)
-
- def __check_return_recursively(self, processed_function, type_symbol=None, stmts=None, ret_defined: bool = False) -> None:
- """
- For a handed over statement, it checks if the statement is a return statement and if it is typed according to the handed over type symbol.
- :param type_symbol: a single type symbol
- :type type_symbol: type_symbol
- :param stmts: a list of statements, either simple or compound
- :type stmts: list(ASTSmallStmt,ASTCompoundStmt)
- :param ret_defined: indicates whether a ret has already been defined after this block of stmt, thus is not
- necessary. Implies that the return has been defined in the higher level block
- """
- # in order to ensure that in the sub-blocks, a return is not necessary, we check if the last one in this
- # block is a return statement, thus it is not required to have a return in the sub-blocks, but optional
- last_statement = stmts[len(stmts) - 1]
- ret_defined = False or ret_defined
- if (len(stmts) > 0 and isinstance(last_statement, ASTStmt)
- and last_statement.is_small_stmt()
- and last_statement.small_stmt.is_return_stmt()):
- ret_defined = True
-
- # now check that returns are there if necessary and correctly typed
- for c_stmt in stmts:
- if c_stmt.is_small_stmt():
- stmt = c_stmt.small_stmt
- else:
- stmt = c_stmt.compound_stmt
-
- # if it is a small statement, check if it is a return statement
- if isinstance(stmt, ASTSmallStmt) and stmt.is_return_stmt():
- # first check if the return is the last one in this block of statements
- if stmts.index(c_stmt) != (len(stmts) - 1):
- code, message = Messages.get_not_last_statement('Return')
- Logger.log_message(error_position=stmt.get_source_position(),
- code=code, message=message,
- log_level=LoggingLevel.WARNING)
-
- # now check that it corresponds to the declared type
- if stmt.get_return_stmt().has_expression() and type_symbol is PredefinedTypes.get_void_type():
- code, message = Messages.get_type_different_from_expected(PredefinedTypes.get_void_type(),
- stmt.get_return_stmt().get_expression().type)
- Logger.log_message(error_position=stmt.get_source_position(),
- message=message, code=code, log_level=LoggingLevel.ERROR)
-
- # if it is not void check if the type corresponds to the one stated
- if not stmt.get_return_stmt().has_expression() and \
- not type_symbol.equals(PredefinedTypes.get_void_type()):
- code, message = Messages.get_type_different_from_expected(PredefinedTypes.get_void_type(),
- type_symbol)
- Logger.log_message(error_position=stmt.get_source_position(),
- message=message, code=code, log_level=LoggingLevel.ERROR)
-
- if stmt.get_return_stmt().has_expression():
- type_of_return = stmt.get_return_stmt().get_expression().type
- if isinstance(type_of_return, ErrorTypeSymbol):
- code, message = Messages.get_type_could_not_be_derived(processed_function.get_name())
- Logger.log_message(error_position=stmt.get_source_position(),
- code=code, message=message, log_level=LoggingLevel.ERROR)
- elif not type_of_return.equals(type_symbol):
- TypeCaster.try_to_recover_or_error(type_symbol, type_of_return,
- stmt.get_return_stmt().get_expression(),
- set_implicit_conversion_factor_on_lhs=True)
- elif isinstance(stmt, ASTCompoundStmt):
- # otherwise it is a compound stmt, thus check recursively
- if stmt.is_if_stmt():
- self.__check_return_recursively(processed_function,
- type_symbol,
- stmt.get_if_stmt().get_if_clause().get_stmts_body().get_stmts(),
- ret_defined)
- for else_ifs in stmt.get_if_stmt().get_elif_clauses():
- self.__check_return_recursively(processed_function,
- type_symbol, else_ifs.get_stmts_body().get_stmts(), ret_defined)
- if stmt.get_if_stmt().has_else_clause():
- self.__check_return_recursively(processed_function,
- type_symbol,
- stmt.get_if_stmt().get_else_clause().get_stmts_body().get_stmts(),
- ret_defined)
- elif stmt.is_while_stmt():
- self.__check_return_recursively(processed_function,
- type_symbol, stmt.get_while_stmt().get_stmts_body().get_stmts(),
- ret_defined)
- elif stmt.is_for_stmt():
- self.__check_return_recursively(processed_function,
- type_symbol, stmt.get_for_stmt().get_stmts_body().get_stmts(),
- ret_defined)
- # now, if a return statement has not been defined in the corresponding higher level block, we have to ensure that it is defined here
- elif not ret_defined and stmts.index(c_stmt) == (len(stmts) - 1):
- if not (isinstance(stmt, ASTSmallStmt) and stmt.is_return_stmt()):
- code, message = Messages.get_no_return()
- Logger.log_message(error_position=stmt.get_source_position(), log_level=LoggingLevel.ERROR,
- code=code, message=message)
diff --git a/pynestml/visitors/ast_power_visitor.py b/pynestml/visitors/ast_power_visitor.py
index 2bd43aec4..26eddf664 100644
--- a/pynestml/visitors/ast_power_visitor.py
+++ b/pynestml/visitors/ast_power_visitor.py
@@ -19,7 +19,6 @@
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see .
-from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter
from pynestml.meta_model.ast_expression import ASTExpression
from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression
from pynestml.symbols.predefined_units import PredefinedUnits
@@ -108,7 +107,7 @@ def calculate_numeric_value(self, expr):
symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name(), SymbolKind.VARIABLE)
if symbol is None:
if PredefinedUnits.is_unit(variable.get_complete_name()):
- return NESTUnitConverter.get_factor(PredefinedUnits.get_unit(variable.get_complete_name()).get_unit())
+ return PredefinedUnits.get_unit(variable.get_complete_name()).get_unit()
raise Exception("Declaration for symbol '" + str(variable) + "' not found and is not a unit.")
diff --git a/tests/expression_type_calculation_test.py b/tests/expression_type_calculation_test.py
index b414e3792..9ed07eecf 100644
--- a/tests/expression_type_calculation_test.py
+++ b/tests/expression_type_calculation_test.py
@@ -22,7 +22,6 @@
import os
import pytest
-from pynestml.codegeneration.nest_unit_converter import NESTUnitConverter
from pynestml.symbol_table.symbol_table import SymbolTable
from pynestml.symbols.predefined_functions import PredefinedFunctions
from pynestml.symbols.predefined_types import PredefinedTypes
@@ -56,7 +55,7 @@ def endvisit_assignment(self, node):
if isinstance(_expr.type, UnitTypeSymbol):
message += " Neuroscience Factor: " + \
- str(NESTUnitConverter.get_factor(_expr.type.astropy_unit))
+ str(_expr.type.astropy_unit)
Logger.log_message(error_position=node.get_source_position(), code=MessageCode.TYPE_MISMATCH,
message=message, log_level=LoggingLevel.INFO)
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/izhikevich_neuron.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/izhikevich_neuron.nestml
new file mode 100644
index 000000000..fb25c7f99
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/izhikevich_neuron.nestml
@@ -0,0 +1,100 @@
+# izhikevich - Izhikevich neuron model
+# ####################################
+#
+# Description
+# +++++++++++
+#
+# Implementation of the simple spiking neuron model introduced by Izhikevich [1]_. The dynamics are given by:
+#
+# .. math::
+#
+# dV_{m}/dt &= 0.04 V_{m}^2 + 5 V_{m} + 140 - U_{m} + I\\
+# dU_{m}/dt &= a (b V_{m} - U_{m})
+#
+#
+# .. math::
+#
+# &\text{if}\;\; V_{m} \geq V_{th}:\\
+# &\;\;\;\; V_{m} \text{ is set to } c\\
+# &\;\;\;\; U_{m} \text{ is incremented by } d\\
+# & \, \\
+# &V_{m} \text{ jumps on each spike arrival by the weight of the spike}
+#
+# Incoming spikes cause an instantaneous jump in the membrane potential proportional to the strength of the synapse.
+#
+# As published in [1]_, the numerics differs from the standard forward Euler technique in two ways:
+#
+# 1) the new value of :math:`U_{m}` is calculated based on the new value of :math:`V_{m}`, rather than the previous value
+# 2) the variable :math:`V_{m}` is updated using a time step half the size of that used to update variable :math:`U_{m}`.
+#
+# This model will instead be simulated using the numerical solver that is recommended by ODE-toolbox during code generation.
+#
+#
+# References
+# ++++++++++
+#
+# .. [1] Izhikevich, Simple Model of Spiking Neurons, IEEE Transactions on Neural Networks (2003) 14:1569-1572
+#
+#
+# Copyright statement
+# +++++++++++++++++++
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+#
+#
+model izhikevich_neuron:
+ state:
+ V_m real = V_m_init # Membrane potential
+ U_m real = bbb * V_m_init # Membrane potential recovery variable
+
+ equations:
+ V_m' = ( 0.04 * V_m * V_m + 5.0 * V_m + ( 140 - U_m ) + (I_e/pA) )/s
+ U_m' = aaa*(bbb*V_m-U_m)/s
+
+ parameters:
+ aaa real = 0.02 # describes time scale of recovery variable
+ bbb real = 0.2 # sensitivity of recovery variable
+ ccc real = -65 # after-spike reset value of V_m
+ ddd real = 8.0 # after-spike reset value of U_m
+ V_m_init real = -65 # initial membrane potential
+ V_th real = 30 # Threshold potential
+
+ # constant external input current
+ I_e pA = 0 pA
+
+ input:
+ exc_spikes <- excitatory spike
+ inh_spikes <- inhibitory spike
+
+ output:
+ spike
+
+ update:
+ integrate_odes()
+
+ onReceive(exc_spikes):
+ V_m += exc_spikes * s
+
+ onReceive(inh_spikes):
+ V_m += inh_spikes * s
+
+ onCondition(V_m >= V_th):
+ # threshold crossing
+ V_m = ccc
+ U_m += ddd
+ emit_spike()
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/non_dimensionalisation_transformer_test_neuron.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/non_dimensionalisation_transformer_test_neuron.nestml
new file mode 100644
index 000000000..d43ff5604
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/non_dimensionalisation_transformer_test_neuron.nestml
@@ -0,0 +1,63 @@
+model non_dimensionalisation_transformer_test_neuron:
+
+ state:
+ I_foo A = 42 mA
+ I_m A = 10 mA
+ V_3 mV = I_foo / 5 nS
+ V_m mV = E_L
+ U_m real = b * V_m_init # Membrane potential recovery variable
+ V_exp_der mV = 2500 uV + V_m_init * exp(alpha_exp * 10 V)
+ refr_t ms = 2 ms # Refractory period timer
+ I_eq mA = 30 mA
+
+ equations:
+ V_m' = I_eq / C_m
+ refr_t' = -1 / s
+ recordable inline I_spike_test A = 30.0 nS * (-V_m_init / 130e3) * exp(((-80 mV) - (-20 mV)) / 3000 uV)
+ V_exp_der' = (I_foo - 200 uA) / (C_exp_0 * (1+exp(alpha_exp * V_m_init)))
+
+ parameters:
+ E_L mV = -70 mV # Resting potential
+ C_m F = 250 pF * 1.0001 # Test if factor works
+ V_m_init mV = -65 mV # Initial membrane potential
+ C_exp_0 F = 150pF
+ alpha_exp 1/V = 2 /3 MV # this could be a factor for a voltage inside of an exp(), e.g. exp(alpha_exp * V_test)
+ b real = 0.2 # sensitivity of recovery variable
+
+ para_giga Ohm = 0.5 GOhm
+ para_mega Hz = 1.1 * 3MHz
+ para_kilo W = 2 kW
+ para_hecto Pa = 1024 hPa
+ para_deca m = 23 dam # this might cause problems, but also deca- is not used particularly frequently
+ para_deci mol = 8 dmol
+ para_centi m = 67 cm
+ para_milli V = 4 mV
+ para_micro S = 2 uS
+ para_nano F = 11 nF
+ para_pico H = 3 pH
+ para_femto A/m = 77 fA/m
+ para_atto s = 40 as
+
+
+ internals:
+ alpha_m_init real = ( 0.1 * ( V_m_init / mV + 40. ) ) / ( 1. - exp( -( V_m_init / mV + 40. ) / 10. ) )
+
+ update:
+ if refr_t > 2 ms:
+ # this has to do nothing as equations are not a real ODE system
+ integrate_odes(refr_t)
+
+ onCondition(refr_t > 2 ms):
+ refr_t = refr_t
+
+
+
+
+# V = 10 nA * 50 Ohm -> convert nA to mA --- 1E-6
+# = 500 nV
+# ---> V = 10 * 1E-6 * 50 = 500E-6
+
+# V = 10 * 0.00001 * mA * 50 Ohm -> convert mA to mA --- 1
+# ---> V = 10 * 0.00001 * 50 = 500E-6
+
+
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/test_function_call_in_equation_block.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_function_call_in_equation_block.nestml
new file mode 100644
index 000000000..3420f0937
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_function_call_in_equation_block.nestml
@@ -0,0 +1,14 @@
+model test_function_call_in_equation_block_transformation_neuron:
+ state:
+ V_m V = -70 mV
+
+ equations:
+ V_m' = -V_m / (tau_m * (1 + exp(alpha_exp * V_m_init)))
+
+ parameters:
+ V_m_init V = -65 mV # test potential
+ tau_m s = 12.85 ms # test time constant
+ alpha_exp 1/V = 2 /(70 GV) # test factor
+
+ update:
+ integrate_odes()
\ No newline at end of file
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/test_inline_expression_in_equation_block.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_inline_expression_in_equation_block.nestml
new file mode 100644
index 000000000..ea35773e5
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_inline_expression_in_equation_block.nestml
@@ -0,0 +1,13 @@
+model test_inline_expression_in_equation_block_transformation_neuron:
+ state:
+ I_test A = 1 mA
+
+ equations:
+ inline I_spike_test A = 0.0153846 mS * (-V_m_init) * exp(((-20 mV) - (-80 mV)) / 60 mV)
+ I_test' = I_spike_test * ms**-1
+
+ parameters:
+ V_m_init V = -65 mV # Initial membrane potential
+
+ update:
+ integrate_odes()
\ No newline at end of file
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/test_internals_block.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_internals_block.nestml
new file mode 100644
index 000000000..8e0f1a4e4
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_internals_block.nestml
@@ -0,0 +1,24 @@
+model non_dimensionalisation_transformer_test_internals_block_neuron:
+ state:
+ Act_n real = alpha_n_init / ( alpha_n_init + beta_n_init )
+
+ parameters:
+ C_m pF = 100 pF # Membrane capacitance
+ g_Na nS = 3500 nS # Sodium peak conductance
+ g_K nS = 900 nS # Potassium peak conductance
+ g_L nS = 10 nS # Leak conductance
+ E_Na mV = 55 mV # Sodium reversal potential
+ E_K mV = -90 mV # Potassium reversal potential
+ E_L mV = -65 mV # Leak reversal potential (aka resting potential)
+ V_Tr mV = -55 mV # Spike threshold
+ refr_T ms = 2 ms # Duration of refractory period
+
+ tau_syn_exc ms = 0.2 ms # Rise time of the excitatory synaptic alpha function
+ tau_syn_inh ms = 10 ms # Rise time of the inhibitory synaptic alpha function
+ E_exc mV = 0 mV # Excitatory synaptic reversal potential
+ E_inh mV = -75 mV # Inhibitory synaptic reversal potential
+
+ internals:
+ alpha_n_init 1/ms = -0.05/(ms*mV) * (E_L + 34.0 mV) / (exp(-0.1 * (E_L + 34.0 mV)) - 1.0)
+ beta_n_init 1/ms = 0.625/ms * exp(-(E_L + 44.0 mV) / 80.0 mV)
+
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/test_metric_prefix_transformation.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_metric_prefix_transformation.nestml
new file mode 100644
index 000000000..bcefe7170
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_metric_prefix_transformation.nestml
@@ -0,0 +1,17 @@
+model test_metric_prefix_transformation_neuron:
+ parameters:
+ para_femto A = 30fA
+ para_atto aH = 40aH
+ para_giga Ohm = 0.5 GOhm
+ para_mega Hz = 1.1 * 3MHz
+ para_kilo W = 2 kW
+ para_hecto Pa = 1024 hPa
+ para_deca m = 23 dam
+ para_deci mol = 8 dmol
+ para_centi m = 67 cm
+ para_milli V = 4 mV
+ para_micro S = 2 uS
+ para_nano F = 11 nF
+ para_pico H = 3 pH
+ # para_femto A / m = 77 fA / m # this causes problems as unit division on the LHS is not allowed currently in the initial parsing
+ # para_atto as = 40 as # This causes problems as the string 'as' is not being added to the predefined units as it is not in dir(u.si), probably because of conflict with python keyword 'as' used in imports
\ No newline at end of file
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/test_real_factor_in_state_block.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_real_factor_in_state_block.nestml
new file mode 100644
index 000000000..3de718733
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_real_factor_in_state_block.nestml
@@ -0,0 +1,7 @@
+model test_real_factor_in_state_block_transformation_neuron:
+ state:
+ U_m real = bbb * V_m_init # Membrane potential recovery variable
+
+ parameters:
+ V_m_init mV = -65 mV # Initial membrane potential
+ bbb real = 0.2 # sensitivity of recovery variable
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/resources/test_reciprocal_units_in_parameter_block.nestml b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_reciprocal_units_in_parameter_block.nestml
new file mode 100644
index 000000000..e6c3a0d83
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/resources/test_reciprocal_units_in_parameter_block.nestml
@@ -0,0 +1,4 @@
+model test_reciprocal_units_in_parameter_block_transformation_neuron:
+ parameters:
+ alpha_exp 1/V = 2 /3 MV # this could be a factor for a voltage inside of an exp(), e.g. exp(alpha_exp * V_test)
+
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_forward_euler_integrator.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_forward_euler_integrator.py
new file mode 100644
index 000000000..de245790d
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_forward_euler_integrator.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+#
+# test_forward_euler_integrator.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+
+"""
+This test should pass with the modified Izhikevich model for unit consistency.
+"""
+
+import numpy as np
+import os
+import pytest
+
+import nest
+
+from pynestml.codegeneration.nest_tools import NESTTools
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+
+class TestForwardEulerIntegrator:
+ """
+ Tests the forward Euler integrator by comparing it to RK45.
+ """
+
+ def generate_target(self, numeric_solver: str):
+ r"""Generate the neuron model code"""
+
+ input_path = os.path.join(
+ os.path.realpath(
+ os.path.join(
+ os.path.dirname(__file__),
+ "../resources",
+ "izhikevich_neuron.nestml",
+ )
+ )
+ )
+ generate_nest_target(
+ input_path=input_path,
+ logging_level="DEBUG",
+ suffix="_" + numeric_solver.replace("-", "_") + "_nestml",
+ module_name="nestml" + numeric_solver.replace("-", "_") + "module",
+ codegen_opts={
+ "numeric_solver": numeric_solver,
+ "quantity_to_preferred_prefix": {
+ "electrical potential": "m", # needed for V_m_init and U_m
+ "electrical current": "p",
+ "electrical resistance": "G",
+ "time": "m",
+ },
+ },
+ )
+
+ return "nestml" + numeric_solver.replace("-", "_") + "module"
+
+ @pytest.mark.skipif(
+ NESTTools.detect_nest_version().startswith("v2"),
+ reason="This test does not support NEST 2",
+ )
+ def test_forward_euler_integrator(self):
+ forward_euler_module_name = self.generate_target("forward-Euler")
+ rk45_module_name = self.generate_target("rk45")
+
+ nest.ResetKernel()
+ nest.Install(forward_euler_module_name)
+ nest.Install(rk45_module_name)
+ nest.resolution = 0.001
+
+ nrn1 = nest.Create("izhikevich_neuron_rk45_nestml")
+ nrn2 = nest.Create("izhikevich_neuron_forward_Euler_nestml")
+
+ nrn1.I_e = 10.0
+ nrn2.I_e = 10.0
+
+ mm1 = nest.Create("multimeter")
+ mm1.set({"record_from": ["V_m"]})
+ recorder_rk45 = nest.Create("spike_recorder")
+
+ mm2 = nest.Create("multimeter")
+ mm2.set({"record_from": ["V_m"]})
+ recorder_fe = nest.Create("spike_recorder")
+
+ nest.Connect(mm1, nrn1)
+ nest.Connect(mm2, nrn2)
+ nest.Connect(nrn1, recorder_rk45)
+ nest.Connect(nrn2, recorder_fe)
+
+ nest.Simulate(100.0)
+
+ v_m1 = mm1.get("events")["V_m"]
+ v_m2 = mm2.get("events")["V_m"]
+
+ np.testing.assert_allclose(
+ v_m1, v_m2, atol=2, rtol=0
+ ) # allow max 2 mV difference between the solutions
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dim_transformer_function_call_in_equation_block.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dim_transformer_function_call_in_equation_block.py
new file mode 100644
index 000000000..343544de0
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dim_transformer_function_call_in_equation_block.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+#
+# test_non_dim_transformer_function_call_in_equation_block.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+
+
+import numpy as np
+from scipy.integrate import solve_ivp
+import matplotlib.pyplot as plt
+
+# parameters (SI)
+params = {
+ "tau_m": 12.85e-3, # membrane time constant (s)
+ "alpha_exp": 2 / 70e6, # exponential factor (1/V)
+ "V_rest": -65e-3, # resting potential (V)
+}
+
+V_m0 = -70e-3 # start 5mV below rest
+
+tau_eff = params["tau_m"] * (1 + np.exp(params["alpha_exp"] * params["V_rest"]))
+
+
+# ODE
+def neuron_ode(t, v):
+ return -v / tau_eff
+
+
+# simulation: 0–50ms
+t_span = (0.0, 0.05) # s
+t_eval = np.linspace(*t_span, 1001)
+
+# sol = solve_ivp(neuron_ode, t_span, [V_m0],
+# t_eval=t_eval, rtol=1e-9, atol=1e-12)
+sol = solve_ivp(neuron_ode, t_span, [V_m0], t_eval=t_eval, rtol=1e-6, atol=1e-6)
+
+# checkpoints
+check_times_ms = np.array([25, 50]) # ms
+check_idx = [np.argmin(np.abs(t_eval * 1e3 - ct)) for ct in check_times_ms]
+check_vm_mV = sol.y[0, check_idx] * 1e3 # mV
+
+# plot
+plt.figure(figsize=(8, 5))
+
+# membrane‑potential trace
+plt.plot(t_eval * 1e3, sol.y[0] * 1e3, label="numeric (solve_ivp)")
+
+# Xs at checkpoints
+plt.plot(
+ check_times_ms,
+ check_vm_mV,
+ "x",
+ markersize=9,
+ markeredgewidth=2,
+ label="checkpoints",
+)
+
+# annotate Xs with their values
+for t, v in zip(check_times_ms, check_vm_mV):
+ offset = 2 if v > 0 else -2
+ plt.text(
+ t,
+ v + offset,
+ f"{v:+.2f}mV",
+ ha="center",
+ va="bottom" if v > 0 else "top",
+ fontsize=9,
+ )
+
+plt.xlabel("Time (ms)")
+plt.ylabel("Membrane potential (mV)")
+plt.title("50ms leak‑decay reference")
+plt.grid(alpha=0.6, linestyle="--")
+plt.legend()
+plt.tight_layout()
+plt.savefig("reference_test_non_dim_transformer_function_call_in_equation_block.png")
+print("test")
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_function_call_in_equation_block.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_function_call_in_equation_block.py
new file mode 100644
index 000000000..48b34c573
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_function_call_in_equation_block.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+#
+# test_non_dimensionalisation_transformer_function_call_in_equation_block.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+
+import nest
+import numpy as np
+import scipy as sp
+import os
+import pytest
+
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+
+class TestNonDimensionalisationTransformerEqationBlock:
+ def generate_code(self, codegen_opts=None):
+ input_path = os.path.join(
+ os.path.realpath(
+ os.path.join(
+ os.path.dirname(__file__),
+ "../resources",
+ "test_function_call_in_equation_block.nestml",
+ )
+ )
+ )
+ target_path = "target"
+ logging_level = "DEBUG"
+ module_name = "nestmlmodule"
+ suffix = "_nestml"
+
+ nest.set_verbosity("M_ALL")
+ generate_nest_target(
+ input_path,
+ target_path=target_path,
+ logging_level=logging_level,
+ module_name=module_name,
+ suffix=suffix,
+ codegen_opts=codegen_opts,
+ )
+
+ def test_exp_in_equationblock(self):
+ """
+ This test checks if the transformer can deal with functions like exp() in the equation block
+ V_m' (s) is a time dependent voltage.
+
+ The target unit for V_exp'(s) is mV, as the 1/s is carried implicitly by declaring the variable with a tick, signifying that it is a derived unit with respect to time.
+ """
+ codegen_opts = {
+ "solver": "numeric",
+ "quantity_to_preferred_prefix": {
+ "electrical potential": "m", # needed for V_m_init and V_exp'
+ # "electrical current": "n", # needed for I_foo
+ # "electrical capacitance": "p", # needed for C_exp_0
+ # "electrical resistance": "M",
+ # "frequency": "k",
+ # "power": "M",
+ # "pressure": "k",
+ # "length": "1",
+ # "amount of substance": "1",
+ # "electrical conductance": "m",
+ # "inductance": "n",
+ "time": "m",
+ },
+ }
+ self.generate_code(codegen_opts)
+
+ nest.ResetKernel()
+ nest.Install("nestmlmodule")
+ nest.resolution = 1
+ nrn = nest.Create(
+ "test_function_call_in_equation_block_transformation_neuron_nestml"
+ )
+ mm = nest.Create("multimeter")
+ nest.SetStatus(mm, {"record_from": ["V_m"]})
+ nest.Connect(mm, nrn)
+ nest.Simulate(500.0)
+
+ assert nrn.V_m_init == -65 # mV
+ assert nrn.tau_m == 12.85 # ms
+ assert nrn.alpha_exp == 2 / (70.0 * 1.0e09) # 1/V
+ V_m_end = mm.get("events")["V_m"]
+ np.allclose(V_m_end[0], -67.3, atol=0.05)
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_inline_expression_in_equationblock.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_inline_expression_in_equationblock.py
new file mode 100644
index 000000000..269e27d0a
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_inline_expression_in_equationblock.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+#
+# test_non_dimensionalisation_transformer_inline_expression_in_equationblock.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+
+import nest
+import numpy as np
+import scipy as sp
+import os
+import pytest
+
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+
+class TestNonDimensionalisationTransformerInlineEquationBlock:
+ r"""
+ This test checks if the transformer can deal with inline expressions in the equation block. Additionally there is an exp() in the expression.
+
+ The target unit JSON file is
+ ```JSON
+ {"quantity_to_preferred_prefix":
+ {
+ "electrical potential": "m", # needed for V_m_init
+ "electrical current": "p", # needed for I_spike_test
+ "electrical capacitance": "1", # needed for caps not part of the test
+ }
+ }
+ ```
+
+ Before the transformation the relevant .NESTML should read
+ ```NESTML
+ equations:
+ inline I_spike_test A = 30.0 nS * (-V_m_init / 130e3) * exp(((-80 mV) - (-20 mV)) / 3000 uV)
+
+ parameters:
+ V_m_init mV = -65 mV # Initial membrane potential
+ ```
+
+ After the transformation it should read
+ ```NESTML
+ equations:
+ inline I_spike_test real = 1e12 * ((30.0 * 1e-9) * ((-V_m_init * 1e-3) / 130e3) * exp(((-80 * 1e-3) - (-20 * 1e-3)) / (3000 * 1e-6)))
+
+ parameters:
+ V_m_init real = 1e3 * (-65 * 1e-3) # Initial membrane potential
+ ```
+ """
+
+ def generate_code(self, codegen_opts=None):
+ input_path = os.path.join(
+ os.path.realpath(
+ os.path.join(
+ os.path.dirname(__file__),
+ "../resources",
+ "test_inline_expression_in_equation_block.nestml",
+ )
+ )
+ )
+ target_path = "target"
+ logging_level = "DEBUG"
+ module_name = "nestmlmodule"
+ suffix = "_nestml"
+
+ nest.set_verbosity("M_ALL")
+ generate_nest_target(
+ input_path,
+ target_path=target_path,
+ logging_level=logging_level,
+ module_name=module_name,
+ suffix=suffix,
+ codegen_opts=codegen_opts,
+ )
+
+ def test_inline_expression_in_equationblock(self):
+ """
+ This test checks if the transformer can deal with inline expressions in the equation block
+ Additionally there is an exp() in the expression
+ """
+ codegen_opts = {
+ "quantity_to_preferred_prefix": {
+ "electrical potential": "m", # needed for voltages not part of the test
+ "electrical current": "m", # needed for currents not part of the test
+ "electrical conductance": "m",
+ "time": "m",
+ }
+ }
+ self.generate_code(codegen_opts)
+
+ nest.ResetKernel()
+ nest.Install("nestmlmodule")
+ nrn = nest.Create(
+ "test_inline_expression_in_equation_block_transformation_neuron_nestml"
+ )
+ mm = nest.Create("multimeter")
+ nest.SetStatus(mm, {"record_from": ["I_test"]})
+ nest.Connect(mm, nrn)
+ nest.Simulate(10.0)
+
+ I_spike = 2.718279110177217 # slope
+ I0 = 1 # t(0) value
+ t_ms = 9.0 # t end
+ expected = I0 + I_spike * t_ms # expected value
+
+ assert np.allclose(mm.get("events")["I_test"][8], expected, atol=1e-9)
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_internals_block.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_internals_block.py
new file mode 100644
index 000000000..5d565337f
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_internals_block.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+#
+# test_non_dimensionalisation_transformer_internals_block.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+
+import nest
+import numpy as np
+import scipy as sp
+import os
+import pytest
+
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+
+class TestNonDimensionalisationTransformerInternalsBlock:
+ r"""
+ This test checks if the transformer can deal with transforming the expressions inside the internals block
+ """
+
+ def generate_code(self, codegen_opts=None):
+ input_path = os.path.join(
+ os.path.realpath(
+ os.path.join(
+ os.path.dirname(__file__),
+ "../resources",
+ "test_internals_block.nestml",
+ )
+ )
+ )
+ target_path = "target"
+ logging_level = "DEBUG"
+ module_name = "nestmlmodule"
+ suffix = "_nestml"
+
+ nest.set_verbosity("M_ALL")
+ generate_nest_target(
+ input_path,
+ target_path=target_path,
+ logging_level=logging_level,
+ module_name=module_name,
+ suffix=suffix,
+ codegen_opts=codegen_opts,
+ )
+
+ def test_internals_block(self):
+ codegen_opts = {
+ "quantity_to_preferred_prefix": {
+ "electrical potential": "m", # needed for V_m_init and U_m
+ "electrical current": "1", # needed for currents not part of the test
+ "electrical capacitance": "1", # needed for caps not part of the test
+ "electrical resistance": "M",
+ "frequency": "k",
+ "power": "M",
+ "pressure": "k",
+ "length": "1",
+ "amount of substance": "1",
+ "electrical conductance": "m",
+ "inductance": "n",
+ "time": "m",
+ }
+ }
+ self.generate_code(codegen_opts)
+
+ nest.ResetKernel()
+ nest.Install("nestmlmodule")
+
+ nrn = nest.Create(
+ "non_dimensionalisation_transformer_test_internals_block_neuron_nestml"
+ )
+ mm = nest.Create("multimeter")
+
+ nest.Connect(mm, nrn)
+
+ nest.Simulate(10.0)
+
+ np.testing.assert_almost_equal(nrn.Act_n, 0.99837490295)
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_metric_prefixes.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_metric_prefixes.py
new file mode 100644
index 000000000..960b8d7c6
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_metric_prefixes.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+#
+# test_non_dimensionalisation_transformer_metric_prefixes.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+
+import nest
+import numpy as np
+import scipy as sp
+import os
+import pytest
+
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+
+class TestNonDimensionalisationTransformer:
+
+ r"""
+ Test Metric Prefixes
+ These tests will check if the standardized metric prefixes in the range of Giga- to Atto- can be resolved.
+ The prefixes Deci- and Deca- are probably little used in a neuroscience context.
+ The test for Femto- includes the use of a combined physical type, the "magnetic field strength".
+
+ """
+ @pytest.fixture(scope="module", autouse=True)
+ def generate_code_metric_prefixes(self, codegen_opts=None):
+ input_path = os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), "../resources", "test_metric_prefix_transformation.nestml")))
+ target_path = "target"
+ logging_level = "DEBUG"
+ module_name = "nestmlmodule"
+ suffix = "_nestml"
+
+ codegen_opts = {"quantity_to_preferred_prefix": {"electrical potential": "m", # needed for V_m_init and U_m
+ "electrical current": "1", # needed for currents not part of the test
+ "electrical capacitance": "1", # needed for caps not part of the test
+ "electrical resistance": "M",
+ "frequency": "k",
+ "power": "M",
+ "pressure": "k",
+ "length": "1",
+ "amount of substance": "1",
+ "electrical conductance": "m",
+ "inductance": "n",
+ "time": "f"}}
+
+ nest.set_verbosity("M_ALL")
+ generate_nest_target(input_path,
+ target_path=target_path,
+ logging_level=logging_level,
+ module_name=module_name,
+ suffix=suffix,
+ codegen_opts=codegen_opts)
+
+ @pytest.mark.parametrize("para_name, expected", [("para_giga", 500), ("para_mega", 3300), ("para_kilo", 0.002), ("para_hecto", 102.4), ("para_deca", 230), ("para_deci", 0.8), ("para_centi", 0.67), ("para_milli", 4), ("para_micro", 0.002), ("para_nano", 1.1e-8), ("para_pico", 0.003), ("para_femto", 30e-15), ("para_atto", 4e-8)])
+ def test_metric_prefixes(self, para_name, expected):
+ """
+ This test checks that the transformer can resolve all metric prefixes in the range of Giga- to Atto- and find the corresponding factor.
+
+ These tests will check if the standardized metric prefixes in the range of Giga- to Atto- can be resolved.
+ The prefixes Deci- and Deca- are probably little used in a neuroscience context.
+ The test for Femto- includes the use of a combined physical type, the "magnetic field strength".
+ """
+
+ nest.ResetKernel()
+ nest.Install("nestmlmodule")
+
+ nrn = nest.Create("test_metric_prefix_transformation_neuron_nestml")
+ mm = nest.Create("multimeter")
+ # nest.SetStatus(mm, {"record_from": [para_name]})
+
+ nest.Connect(mm, nrn)
+
+ nest.Simulate(10.)
+
+ res = nrn.get(para_name)
+
+ np.testing.assert_allclose(res, expected)
+ pass
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_real_factor_in_stateblock.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_real_factor_in_stateblock.py
new file mode 100644
index 000000000..ea2c3b0af
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_real_factor_in_stateblock.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+#
+# test_non_dimensionalisation_transformer_real_factor_in_stateblock.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see <https://www.gnu.org/licenses/>.
+
+import nest
+import numpy as np
+import scipy as sp
+import os
+import pytest
+
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+
+class TestNonDimensionalisationTransformerStateBlock:
+ r"""
+ This test checks if state block expressions with a RHS with a unit being multiplied by a real factor and a LHS with type 'real' will get processed correctly.
+
+ This test checks if state block expressions with a RHS with a unit being multiplied by a real factor and a LHS with type 'real' will get processed correctly.
+
+ The target unit JSON file is
+ ```JSON
+ {"quantity_to_preferred_prefix":
+ {
+ "electrical potential": "m", # needed for V_m_init and U_m
+ "electrical current": "1", # needed for currents not part of the test
+ "electrical capacitance": "1", # needed for caps not part of the test
+ }
+ }
+ ```
+ Before the transformation the relevant .NESTML should read
+
+ ```NESTML
+ state:
+ U_m real = b * V_m_init # Membrane potential recovery variable
+
+ parameters:
+ b real = 0.2 # sensitivity of recovery variable
+ V_m_init mV = -65 mV # Initial membrane potential
+ ```
+ After the transformation it should read
+ ```NESTML
+ state:
+ U_m real = b * V_m_init # Membrane potential recovery variable
+
+ parameters:
+ b real = 0.2 # sensitivity of recovery variable
+ V_m_init real = 1e3 * (-65e-3) # Initial membrane potential
+ ```
+
+ """
+
+ def generate_code(self, codegen_opts=None):
+ input_path = os.path.join(
+ os.path.realpath(
+ os.path.join(
+ os.path.dirname(__file__),
+ "../resources",
+ "test_real_factor_in_state_block.nestml",
+ )
+ )
+ )
+ target_path = "target"
+ logging_level = "DEBUG"
+ module_name = "nestmlmodule"
+ suffix = "_nestml"
+
+ nest.set_verbosity("M_ALL")
+ generate_nest_target(
+ input_path,
+ target_path=target_path,
+ logging_level=logging_level,
+ module_name=module_name,
+ suffix=suffix,
+ codegen_opts=codegen_opts,
+ )
+
+ def test_real_factor_in_stateblock(self):
+ r"""
+ This test checks if state block expressions with
+ a RHS with a unit being multiplied by a real factor and
+ a LHS with type 'real'
+ will get processed correctly
+ """
+ codegen_opts = {
+ "quantity_to_preferred_prefix": {
+ "electrical potential": "k", # needed for V_m_init and U_m
+ "electrical current": "1",
+ # needed for currents not part of the test
+ "electrical capacitance": "1",
+ # needed for caps not part of the test
+ "electrical resistance": "M",
+ "frequency": "k",
+ "power": "M",
+ "pressure": "k",
+ "length": "1",
+ "amount of substance": "1",
+ "electrical conductance": "m",
+ "inductance": "n",
+ "time": "f",
+ }
+ }
+ self.generate_code(codegen_opts)
+
+ nest.ResetKernel()
+ nest.Install("nestmlmodule")
+
+ nrn = nest.Create(
+ "test_real_factor_in_state_block_transformation_neuron_nestml"
+ )
+ mm = nest.Create("multimeter")
+
+ nest.Connect(mm, nrn)
+
+ nest.Simulate(10.0)
+
+ V_m_init = nrn.get("V_m_init")
+
+ np.testing.assert_allclose(V_m_init, -65e-6)
diff --git a/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_reciprocal_unit_in_parameterblock.py b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_reciprocal_unit_in_parameterblock.py
new file mode 100644
index 000000000..08266a1a4
--- /dev/null
+++ b/tests/nest_tests/non_dimensionalisation_transformer/tests/test_non_dimensionalisation_transformer_reciprocal_unit_in_parameterblock.py
@@ -0,0 +1,127 @@
+# -*- coding: utf-8 -*-
+#
+# test_non_dimensionalisation_transformer_reciprocal_unit_in_parameterblock.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see <https://www.gnu.org/licenses/>.
+
+import nest
+import numpy as np
+import scipy as sp
+import os
+import re
+import pytest
+
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+
+class TestNonDimensionalisationTransformerStateBlock:
+ r"""
+ This test checks if the transformer can deal with reciprocal units on the LHS of an equation inside the parameter block
+
+ The target unit JSON file is
+ ```JSON
+ {"quantity_to_preferred_prefix":
+ {
+ "electrical potential": "m", # needed for V_exp, alpha_exp
+ "electrical current": "1", # needed for I_spike_test
+ "electrical capacitance": "1", # needed for caps not part of the test
+ }
+ }
+ ```
+ Before the transformation the relevant .NESTML should read
+ ```NESTML
+ state:
+ V_exp V = 2500 uV + V_m_init * exp(alpha_exp * 10 V)
+
+ parameters:
+ V_m_init mV = -65 mV # Initial membrane potential
+ alpha_exp 1/V = 2 /(3 MV) # this could be a factor for a voltage inside of an exp(), e.g. exp(alpha_exp * V_test)
+ ```
+
+ After the transformation it should read
+ ```NESTML
+ state:
+ V_exp V = (2500 * 1e-6) + (V_m_init * 1e-3) * exp((alpha_exp * 1e-6) * 10)
+
+ parameters:
+ V_m_init real = 1e3 * (-65 * 1e-3) # Initial membrane potential
+ alpha_exp real = 1e-3 * (2 / (3 * 1e6)) # this could be a factor for a voltage inside of an exp(), e.g. exp(alpha_exp * V_test)
+ ```
+
+ TODO: The grammar needs to be changed for reciprocal units to be accepted on LHSs
+ """
+
+ def generate_code(self, codegen_opts=None):
+ input_path = os.path.join(
+ os.path.realpath(
+ os.path.join(
+ os.path.dirname(__file__),
+ "../resources",
+ "test_reciprocal_units_in_parameter_block.nestml",
+ )
+ )
+ )
+ target_path = "target"
+ logging_level = "DEBUG"
+ module_name = "nestmlmodule"
+ suffix = ""
+
+ nest.set_verbosity("M_ALL")
+ generate_nest_target(
+ input_path,
+ target_path=target_path,
+ logging_level=logging_level,
+ module_name=module_name,
+ suffix=suffix,
+ codegen_opts=codegen_opts,
+ )
+
+ def test_reciprocal_unit_in_parameterblock(self):
+ codegen_opts = {
+ "quantity_to_preferred_prefix": {
+ "electrical potential": "m", # needed for V_m_init and U_m
+ "electrical current": "1", # needed for currents not part of the test
+ "electrical capacitance": "1", # needed for caps not part of the test
+ "electrical resistance": "M",
+ "frequency": "k",
+ "power": "M",
+ "pressure": "k",
+ "length": "1",
+ "amount of substance": "1",
+ "electrical conductance": "m",
+ "inductance": "n",
+ "time": "f",
+ }
+ }
+ self.generate_code(codegen_opts)
+
+ nest.ResetKernel()
+ nest.Install("nestmlmodule")
+
+ nrn = nest.Create(
+ "test_reciprocal_units_in_parameter_block_transformation_neuron"
+ )
+ mm = nest.Create("multimeter")
+
+ nest.Connect(mm, nrn)
+
+ nest.Simulate(10.0)
+
+ np.testing.assert_almost_equal(
+ nrn.get("alpha_exp"), 6.667e-7
+ ) # should be (2e-10/3) (1/mV)
diff --git a/tests/nest_tests/resources/iaf_psc_exp_neuron.nestml b/tests/nest_tests/resources/iaf_psc_exp_neuron.nestml
new file mode 100644
index 000000000..73f6fe1ff
--- /dev/null
+++ b/tests/nest_tests/resources/iaf_psc_exp_neuron.nestml
@@ -0,0 +1,108 @@
+# iaf_psc_exp - Leaky integrate-and-fire neuron model
+# ###################################################
+#
+# Description
+# +++++++++++
+#
+# iaf_psc_exp is an implementation of a leaky integrate-and-fire model
+# with exponentially decaying synaptic currents according to [1]_.
+# Thus, postsynaptic currents have an infinitely short rise time.
+#
+# The threshold crossing is followed by an absolute refractory period
+# during which the membrane potential is clamped to the resting potential
+# and spiking is prohibited.
+#
+# The general framework for the consistent formulation of systems with
+# neuron like dynamics interacting by point events is described in
+# [1]_. A flow chart can be found in [2]_.
+#
+# Critical tests for the formulation of the neuron model are the
+# comparisons of simulation results for different computation step
+# sizes.
+#
+# .. note::
+# If tau_m is very close to tau_syn_exc or tau_syn_inh, numerical problems
+# may arise due to singularities in the propagator matrices. If this is
+# the case, replace equal-valued parameters by a single parameter.
+#
+# For details, please see ``IAF_neurons_singularity.ipynb`` in
+# the NEST source code (``docs/model_details``).
+#
+#
+# References
+# ++++++++++
+#
+# .. [1] Rotter S, Diesmann M (1999). Exact simulation of
+# time-invariant linear systems with applications to neuronal
+# modeling. Biological Cybernetics 81:381-402.
+# DOI: https://doi.org/10.1007/s004220050570
+# .. [2] Diesmann M, Gewaltig M-O, Rotter S, & Aertsen A (2001). State
+# space analysis of synchronous spiking in cortical neural
+# networks. Neurocomputing 38-40:565-571.
+# DOI: https://doi.org/10.1016/S0925-2312(01)00409-X
+# .. [3] Morrison A, Straube S, Plesser H E, Diesmann M (2006). Exact
+# subthreshold integration with continuous spike times in discrete time
+# neural network simulations. Neural Computation, in press
+# DOI: https://doi.org/10.1162/neco.2007.19.1.47
+#
+#
+# See also
+# ++++++++
+#
+# iaf_psc_delta, iaf_psc_alpha, iaf_cond_exp
+#
+#
+model iaf_psc_exp_neuron:
+
+ state:
+ V_m mV = E_L # Membrane potential
+ refr_t ms = 0 ms # Refractory period timer
+ I_syn_exc pA = 0 pA
+ I_syn_inh pA = 0 pA
+
+ equations:
+ I_syn_exc' = -I_syn_exc / tau_syn_exc
+ I_syn_inh' = -I_syn_inh / tau_syn_inh
+ V_m' = -(V_m - E_L) / tau_m + (I_syn_exc - I_syn_inh + I_e + I_stim) / C_m
+ refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984)
+
+ parameters:
+ C_m pF = 250 pF # Capacitance of the membrane
+ tau_m ms = 10 ms # Membrane time constant
+ tau_syn_inh ms = 2 ms # Time constant of inhibitory synaptic current
+ tau_syn_exc ms = 2 ms # Time constant of excitatory synaptic current
+ refr_T ms = 2 ms # Duration of refractory period
+ E_L mV = -70 mV # Resting potential
+ V_reset mV = -70 mV # Reset value of the membrane potential
+ V_th mV = -55 mV # Spike threshold potential
+
+ # constant external input current
+ I_e pA = 0 pA
+
+ input:
+ exc_spikes <- excitatory spike
+ inh_spikes <- inhibitory spike
+ I_stim pA <- continuous
+
+ output:
+ spike
+
+ update:
+ if refr_t > 0 ms:
+ # neuron is absolute refractory, do not evolve V_m
+ integrate_odes(I_syn_exc, I_syn_inh, refr_t)
+ else:
+ # neuron not refractory
+ integrate_odes(I_syn_exc, I_syn_inh, V_m)
+
+ onReceive(exc_spikes):
+ I_syn_exc += exc_spikes * pA * s
+
+ onReceive(inh_spikes):
+ I_syn_inh += inh_spikes * pA * s
+
+ onCondition(refr_t <= 0 ms and V_m >= V_th):
+ # threshold crossing
+ refr_t = refr_T # start of the refractory period
+ V_m = V_reset
+ emit_spike()
diff --git a/tests/nest_tests/resources/iaf_psc_exp_neuron_NO_ISTIM.nestml b/tests/nest_tests/resources/iaf_psc_exp_neuron_NO_ISTIM.nestml
new file mode 100644
index 000000000..36d194a81
--- /dev/null
+++ b/tests/nest_tests/resources/iaf_psc_exp_neuron_NO_ISTIM.nestml
@@ -0,0 +1,130 @@
+# iaf_psc_exp - Leaky integrate-and-fire neuron model
+# ###################################################
+#
+# Description
+# +++++++++++
+#
+# iaf_psc_exp is an implementation of a leaky integrate-and-fire model
+# with exponentially decaying synaptic currents according to [1]_.
+# Thus, postsynaptic currents have an infinitely short rise time.
+# The input current I_stim is removed for code generation testing purposes.
+#
+# The threshold crossing is followed by an absolute refractory period
+# during which the membrane potential is clamped to the resting potential
+# and spiking is prohibited.
+#
+# The general framework for the consistent formulation of systems with
+# neuron like dynamics interacting by point events is described in
+# [1]_. A flow chart can be found in [2]_.
+#
+# Critical tests for the formulation of the neuron model are the
+# comparisons of simulation results for different computation step
+# sizes.
+#
+# .. note::
+#
+# If tau_m is very close to tau_syn_exc or tau_syn_inh, numerical problems
+# may arise due to singularities in the propagator matrices. If this is
+# the case, replace equal-valued parameters by a single parameter.
+#
+# For details, please see ``IAF_neurons_singularity.ipynb`` in
+# the NEST source code (``docs/model_details``).
+#
+#
+# References
+# ++++++++++
+#
+# .. [1] Rotter S, Diesmann M (1999). Exact simulation of
+# time-invariant linear systems with applications to neuronal
+# modeling. Biological Cybernetics 81:381-402.
+# DOI: https://doi.org/10.1007/s004220050570
+# .. [2] Diesmann M, Gewaltig M-O, Rotter S, & Aertsen A (2001). State
+# space analysis of synchronous spiking in cortical neural
+# networks. Neurocomputing 38-40:565-571.
+# DOI: https://doi.org/10.1016/S0925-2312(01)00409-X
+# .. [3] Morrison A, Straube S, Plesser H E, Diesmann M (2006). Exact
+# subthreshold integration with continuous spike times in discrete time
+# neural network simulations. Neural Computation, in press
+# DOI: https://doi.org/10.1162/neco.2007.19.1.47
+#
+#
+# See also
+# ++++++++
+#
+# iaf_psc_delta, iaf_psc_alpha, iaf_cond_exp
+#
+#
+# Copyright statement
+# +++++++++++++++++++
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+model iaf_psc_exp_neuron:
+
+ state:
+ V_m mV = E_L # Membrane potential
+ refr_t ms = 0 ms # Refractory period timer
+ I_syn_exc pA = 0 pA
+ I_syn_inh pA = 0 pA
+
+ equations:
+ I_syn_exc' = -I_syn_exc / tau_syn_exc
+ I_syn_inh' = -I_syn_inh / tau_syn_inh
+ V_m' = -(V_m - E_L) / tau_m + (I_syn_exc - I_syn_inh + I_e) / C_m
+ refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984)
+
+ parameters:
+ C_m pF = 250 pF # Capacitance of the membrane
+ tau_m ms = 10 ms # Membrane time constant
+ tau_syn_inh ms = 2 ms # Time constant of inhibitory synaptic current
+ tau_syn_exc ms = 2 ms # Time constant of excitatory synaptic current
+ refr_T ms = 2 ms # Duration of refractory period
+ E_L mV = -70 mV # Resting potential
+ V_reset mV = -70 mV # Reset value of the membrane potential
+ V_th mV = -55 mV # Spike threshold potential
+
+ # constant external input current
+ I_e pA = 0 pA
+
+ input:
+ exc_spikes <- excitatory spike
+ inh_spikes <- inhibitory spike
+
+ output:
+ spike
+
+ update:
+ if refr_t > 0 ms:
+ # neuron is absolute refractory, do not evolve V_m
+ integrate_odes(I_syn_exc, I_syn_inh, refr_t)
+ else:
+ # neuron not refractory
+ integrate_odes(I_syn_exc, I_syn_inh, V_m)
+
+ onReceive(exc_spikes):
+ I_syn_exc += exc_spikes * pA * s
+
+ onReceive(inh_spikes):
+ I_syn_inh += inh_spikes * pA * s
+
+ onCondition(refr_t <= 0 ms and V_m >= V_th):
+ # threshold crossing
+ refr_t = refr_T # start of the refractory period
+ V_m = V_reset
+ emit_spike()
diff --git a/tests/nest_tests/resources/non_dimensionalisation_transformer_test_neuron.nestml b/tests/nest_tests/resources/non_dimensionalisation_transformer_test_neuron.nestml
new file mode 100644
index 000000000..230969bad
--- /dev/null
+++ b/tests/nest_tests/resources/non_dimensionalisation_transformer_test_neuron.nestml
@@ -0,0 +1,66 @@
+model non_dimensionalisation_transformer_test_neuron:
+
+ state:
+ I_foo A = 42 mA
+ I_m A = 10 mA
+ V_3 mV = I_foo / 5 nS
+ V_m mV = E_L
+ U_m real = b * V_m_init # Membrane potential recovery variable
+ V_exp mV = 2500 uV + V_m_init * exp(alpha_exp * 10 V)
+ refr_t ms = 2 ms # Refractory period timer
+ I_eq A = 30 mA
+
+ equations:
+ # V_m' = I_eq / C_m
+ V_m' = I_eq / C_m
+ refr_t' s = -1 / s
+ inline I_spike_test A = 30.0 nS * (-V_m_init / 130e3) * exp(((-80 mV) - (-20 mV)) / 3000 uV)
+ # V_exp_der' = (I_foo - 200uA) / (C_exp_0 * (1+exp(alpha_exp * V_m_init)))
+ V_exp_der' = (I_foo - 200uA) / (C_exp_0 * (1+exp(alpha_exp * V_m_init)))
+
+ parameters:
+ E_L mV = -70 mV # Resting potential
+ C_m F = 250 pF * 1.0001 # Test if factor works
+ V_m_init mV = -65 mV # Initial membrane potential
+ C_exp_0 F = 150pF
+ alpha_exp = 2 /3 MV # this could be a factor for a voltage inside of an exp(), e.g. exp(alpha_exp * V_test)
+ b real = 0.2 # sensitivity of recovery variable
+
+ para_giga Ohm = 0.5 GOhm
+ para_mega Hz = 1.1 * 3MHz
+ para_kilo W = 2 kW
+ para_hecto Pa = 1024 hPa
+ para_deca m = 23 dam # this might cause problems, but also deca- is not used particularly frequently
+ para_deci mol = 8 dmol
+ para_centi m = 67 cm
+ para_milli V = 4 mV
+ para_micro S = 2 uS
+ para_nano F = 11 nF
+ para_pico H = 3 pH
+ # para_femto A/m = 77 fA/m
+ para_atto s = 40 as
+
+
+ internals:
+ alpha_m_init real = ( 0.1 * ( V_m_init / mV + 40. ) ) / ( 1. - exp( -( V_m_init / mV + 40. ) / 10. ) )
+
+ update:
+ if refr_t > 2 ms:
+ # this has to do nothing as equations are not a real ODE system
+ integrate_odes(refr_t)
+
+ onCondition(refr_t > 2 ms):
+ #this should never be reached
+ refr_t = refr_t
+
+
+
+
+# V = 10 nA * 50 Ohm -> convert nA to mA --- 1E-6
+# = 500 nV
+# ---> V = 10 * 1E-6 * 50 = 500E-6
+
+# V = 10 * 0.00001 * mA * 50 Ohm -> convert mA to mA --- 1
+# ---> V = 10 * 0.00001 * 50 = 500E-6
+
+
diff --git a/tests/nest_tests/test_forward_euler_integrator.py b/tests/nest_tests/test_forward_euler_integrator.py
index 1c539ea15..0ea13eb8c 100644
--- a/tests/nest_tests/test_forward_euler_integrator.py
+++ b/tests/nest_tests/test_forward_euler_integrator.py
@@ -57,26 +57,28 @@ def test_forward_euler_integrator(self):
nest.ResetKernel()
nest.Install(forward_euler_module_name)
nest.Install(rk45_module_name)
- nest.resolution = .001
+ nest.resolution = 0.001
nrn1 = nest.Create("izhikevich_neuron_rk45_nestml")
nrn2 = nest.Create("izhikevich_neuron_forward_Euler_nestml")
- nrn1.I_e = 10.
- nrn2.I_e = 10.
+ nrn1.I_e = 10.0
+ nrn2.I_e = 10.0
mm1 = nest.Create("multimeter")
mm1.set({"record_from": ["V_m"]})
+ recorder_rk45 = nest.Create("spike_recorder")
mm2 = nest.Create("multimeter")
mm2.set({"record_from": ["V_m"]})
+ recorder_fe = nest.Create("spike_recorder")
nest.Connect(mm1, nrn1)
nest.Connect(mm2, nrn2)
+ nest.Connect(nrn1, recorder_rk45)
+ nest.Connect(nrn2, recorder_fe)
- nest.Simulate(100.)
+ nest.Simulate(100.0)
v_m1 = mm1.get("events")["V_m"]
v_m2 = mm2.get("events")["V_m"]
-
- np.testing.assert_allclose(v_m1, v_m2, atol=2, rtol=0) # allow max 2 mV difference between the solutions
diff --git a/tests/python_standalone_tests/test_neuron_build_and_sim_analytic.py b/tests/python_standalone_tests/test_neuron_build_and_sim_analytic.py
index f00de8374..62897035f 100644
--- a/tests/python_standalone_tests/test_neuron_build_and_sim_analytic.py
+++ b/tests/python_standalone_tests/test_neuron_build_and_sim_analytic.py
@@ -40,7 +40,15 @@ def test_python_standalone_neuron_build_and_sim_analytic(self):
logging_level = "INFO"
suffix = ""
module_name = "nestmlmodule"
- codegen_opts = {}
+ codegen_opts = { "quantity_to_preferred_prefix": {
+ "electrical potential": "m",
+ "electrical current": "p",
+ "electrical resistance": "G",
+ "electrical capacitance": "p",
+ "electrical conductance": "n",
+ "time": "m",
+ },
+}
generate_python_standalone_target(input_path, target_path,
module_name=module_name,
diff --git a/tests/python_standalone_tests/test_neuron_build_and_sim_numeric.py b/tests/python_standalone_tests/test_neuron_build_and_sim_numeric.py
index ba0213579..965354b73 100644
--- a/tests/python_standalone_tests/test_neuron_build_and_sim_numeric.py
+++ b/tests/python_standalone_tests/test_neuron_build_and_sim_numeric.py
@@ -35,10 +35,18 @@ def test_python_standalone_neuron_build_and_sim_numeric(self):
input_path = os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join(
os.pardir, os.pardir, "models", "neurons", "aeif_cond_exp_neuron.nestml"))))
target_path = "nestmlmodule"
- logging_level = "INFO"
+ logging_level = "DEBUG"
suffix = ""
module_name = "nestmlmodule"
- codegen_opts = {}
+ codegen_opts = { "quantity_to_preferred_prefix": {
+ "electrical potential": "m",
+ "electrical current": "p",
+ "electrical capacitance": "p",
+ "electrical resistance": "M",
+ "electrical conductance": "n",
+ "time": "m",
+ },
+}
generate_python_standalone_target(input_path, target_path,
module_name=module_name,