From ffb7bda030010d5ca21b1422fbe2f56434830c98 Mon Sep 17 00:00:00 2001 From: LeanderEwert <116215464+LeanderEwert@users.noreply.github.com> Date: Fri, 29 Sep 2023 11:51:38 +0200 Subject: [PATCH] Compartmental models in NESTML (#872) --- .github/workflows/nestml-build.yml | 71 +- doc/requirements.txt | 2 +- doc/running/index.rst | 12 +- doc/running/running_nest_compartmental.rst | 152 +++ extras/convert_cm_default_to_template.py | 152 +++ pynestml/cocos/co_co_cm_channel_model.py | 36 + .../cocos/co_co_cm_concentration_model.py | 37 + pynestml/cocos/co_co_cm_synapse_model.py | 37 + pynestml/cocos/co_co_v_comp_exists.py | 80 ++ pynestml/cocos/co_cos_manager.py | 35 +- .../nest_code_generator_utils.py | 3 +- .../nest_compartmental_code_generator.py | 941 ++++++++++++++++++ .../printers/nest_variable_printer.py | 9 +- .../CommonPropertiesDictionaryReader.jinja2 | 9 + .../cm_neuron/@NEURON_NAME@.cpp.jinja2 | 357 +++++++ .../cm_neuron/@NEURON_NAME@.h.jinja2 | 342 +++++++ .../cm_neuron/__init__.py | 24 + ...mpartmentcurrents_@NEURON_NAME@.cpp.jinja2 | 417 ++++++++ ...compartmentcurrents_@NEURON_NAME@.h.jinja2 | 470 +++++++++ .../cm_tree_@NEURON_NAME@.cpp.jinja2 | 515 ++++++++++ .../cm_neuron/cm_tree_@NEURON_NAME@.h.jinja2 | 250 +++++ .../cm_neuron/directives_cpp | 1 + .../cm_neuron/setup/@MODULE_NAME@.cpp.jinja2 | 126 +++ .../cm_neuron/setup/@MODULE_NAME@.h.jinja2 | 92 ++ .../cm_neuron/setup/CMakeLists.txt.jinja2 | 290 ++++++ .../cm_neuron/setup/__init__.py | 24 + pynestml/frontend/frontend_configuration.py | 1 + pynestml/frontend/pynestml_frontend.py | 58 +- pynestml/generated/PyNestMLLexer.py | 4 +- pynestml/generated/PyNestMLParser.interp | 239 ----- pynestml/generated/PyNestMLParser.py | 907 +++++++++-------- pynestml/generated/PyNestMLParserVisitor.py | 2 +- pynestml/grammars/PyNestMLParser.g4 | 4 +- pynestml/meta_model/ast_inline_expression.py | 16 +- pynestml/meta_model/ast_node_factory.py | 12 +- pynestml/meta_model/ast_ode_equation.py | 17 +- .../ast_mechanism_information_collector.py | 457 +++++++++ .../ast_synapse_information_collector.py | 349 +++++++ pynestml/utils/ast_utils.py | 48 +- pynestml/utils/chan_info_enricher.py | 59 ++ pynestml/utils/channel_processing.py | 88 ++ pynestml/utils/conc_info_enricher.py | 28 + pynestml/utils/concentration_processing.py | 104 ++ pynestml/utils/logger.py | 11 +- pynestml/utils/mechanism_processing.py | 215 ++++ pynestml/utils/mechs_info_enricher.py | 175 ++++ pynestml/utils/messages.py | 217 +++- pynestml/utils/synapse_processing.py | 231 +++++ pynestml/utils/syns_info_enricher.py | 348 +++++++ pynestml/visitors/ast_builder_visitor.py | 20 +- pynestml/visitors/ast_symbol_table_visitor.py | 32 +- requirements.txt | 2 +- setup.py | 3 + tests/cocos_test.py | 847 ++++++++++------ ...tdp_synapse_missing_delay_decorator.nestml | 79 ++ tests/nest_compartmental_tests/cocos_test.py | 125 +++ .../compartmental_model_test.py | 522 ++++++++++ .../concmech_model_test.py | 113 +++ .../resources/cm_default.nestml | 152 +++ .../resources/concmech.nestml | 196 ++++ .../invalid/CoCoCmVariableHasRhs.nestml | 57 ++ .../invalid/CoCoCmVariableMultiUse.nestml | 57 ++ .../invalid/CoCoCmVariablesDeclared.nestml | 57 ++ .../invalid/CoCoCmVcompExists.nestml | 60 ++ .../valid/CoCoCmVariableHasRhs.nestml | 57 ++ .../valid/CoCoCmVariableMultiUse.nestml | 57 ++ .../valid/CoCoCmVariablesDeclared.nestml | 64 ++ .../resources/valid/CoCoCmVcompExists.nestml | 61 ++ tests/nest_tests/nest_multisynapse_test.py | 2 +- 69 files changed, 9596 insertions(+), 1041 deletions(-) create mode 
100644 doc/running/running_nest_compartmental.rst create mode 100644 extras/convert_cm_default_to_template.py create mode 100644 pynestml/cocos/co_co_cm_channel_model.py create mode 100644 pynestml/cocos/co_co_cm_concentration_model.py create mode 100644 pynestml/cocos/co_co_cm_synapse_model.py create mode 100644 pynestml/cocos/co_co_v_comp_exists.py create mode 100644 pynestml/codegeneration/nest_compartmental_code_generator.py create mode 100644 pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/CommonPropertiesDictionaryReader.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.cpp.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.h.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/__init__.py create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.cpp.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.h.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.cpp.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.h.jinja2 create mode 120000 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/directives_cpp create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.cpp.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.h.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/CMakeLists.txt.jinja2 create mode 100644 pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/__init__.py delete mode 100644 pynestml/generated/PyNestMLParser.interp create mode 100644 pynestml/utils/ast_mechanism_information_collector.py create mode 100644 pynestml/utils/ast_synapse_information_collector.py create mode 100644 pynestml/utils/chan_info_enricher.py create mode 100644 pynestml/utils/channel_processing.py create mode 100644 pynestml/utils/conc_info_enricher.py create mode 100644 pynestml/utils/concentration_processing.py create mode 100644 pynestml/utils/mechanism_processing.py create mode 100644 pynestml/utils/mechs_info_enricher.py create mode 100644 pynestml/utils/synapse_processing.py create mode 100644 pynestml/utils/syns_info_enricher.py create mode 100644 tests/invalid/stdp_synapse_missing_delay_decorator.nestml create mode 100644 tests/nest_compartmental_tests/cocos_test.py create mode 100644 tests/nest_compartmental_tests/compartmental_model_test.py create mode 100644 tests/nest_compartmental_tests/concmech_model_test.py create mode 100644 tests/nest_compartmental_tests/resources/cm_default.nestml create mode 100644 tests/nest_compartmental_tests/resources/concmech.nestml create mode 100644 tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableHasRhs.nestml create mode 100644 tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableMultiUse.nestml create mode 100644 tests/nest_compartmental_tests/resources/invalid/CoCoCmVariablesDeclared.nestml create mode 100644 tests/nest_compartmental_tests/resources/invalid/CoCoCmVcompExists.nestml create mode 100644 tests/nest_compartmental_tests/resources/valid/CoCoCmVariableHasRhs.nestml create mode 100644 
tests/nest_compartmental_tests/resources/valid/CoCoCmVariableMultiUse.nestml create mode 100644 tests/nest_compartmental_tests/resources/valid/CoCoCmVariablesDeclared.nestml create mode 100644 tests/nest_compartmental_tests/resources/valid/CoCoCmVcompExists.nestml diff --git a/.github/workflows/nestml-build.yml b/.github/workflows/nestml-build.yml index c38b6ab1d..4795e7d62 100644 --- a/.github/workflows/nestml-build.yml +++ b/.github/workflows/nestml-build.yml @@ -118,7 +118,7 @@ jobs: done; exit $rc - build_and_test: + build_and_test_nest: needs: [static_checks] runs-on: ubuntu-latest strategy: @@ -247,3 +247,72 @@ jobs: done; cd $GITHUB_WORKSPACE exit $rc + + + build_and_test_nest_compartmental: + needs: [static_checks] + runs-on: ubuntu-latest + strategy: + matrix: + nest_branch: ["master"] + fail-fast: false + steps: + # Checkout the repository contents + - name: Checkout NESTML code + uses: actions/checkout@v3 + + # Setup Python version + - name: Setup Python 3.9 + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + # Install dependencies + - name: Install apt dependencies + run: | + sudo apt-get update + sudo apt-get install libltdl7-dev libgsl0-dev libncurses5-dev libreadline6-dev pkg-config + sudo apt-get install python3-all-dev python3-matplotlib python3-numpy python3-scipy ipython3 + + # Install Python dependencies + - name: Python dependencies + run: | + python -m pip install --upgrade pip pytest jupyterlab matplotlib pycodestyle scipy pandas + python -m pip install -r requirements.txt + + # Install NEST simulator + - name: NEST simulator + run: | + python -m pip install cython + echo "GITHUB_WORKSPACE = $GITHUB_WORKSPACE" + NEST_SIMULATOR=$(pwd)/nest-simulator + NEST_INSTALL=$(pwd)/nest_install + echo "NEST_SIMULATOR = $NEST_SIMULATOR" + echo "NEST_INSTALL = $NEST_INSTALL" + + git clone --depth=1 https://github.com/nest/nest-simulator --branch ${{ matrix.nest_branch }} + mkdir nest_install + echo "NEST_INSTALL=$NEST_INSTALL" >> $GITHUB_ENV + cd nest_install + cmake -DCMAKE_INSTALL_PREFIX=$NEST_INSTALL $NEST_SIMULATOR + make && make install + cd .. + + # Install NESTML (repeated) + - name: Install NESTML + run: | + export PYTHONPATH=${{ env.PYTHONPATH }}:${{ env.NEST_INSTALL }}/lib/python3.9/site-packages + #echo PYTHONPATH=`pwd` >> $GITHUB_ENV + echo "PYTHONPATH=$PYTHONPATH" >> $GITHUB_ENV + python setup.py install + + # Integration tests + - name: Run integration tests + env: + LD_LIBRARY_PATH: ${{ env.NEST_INSTALL }}/lib/nest + run: | + rc=0 + for fn in $GITHUB_WORKSPACE/tests/nest_compartmental_tests/*.py; do + pytest -s -o log_cli=true -o log_cli_level="DEBUG" ${fn} || rc=1 + done; + exit $rc diff --git a/doc/requirements.txt b/doc/requirements.txt index 2a5bd8427..9f8178e9b 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -15,7 +15,7 @@ sphinx-design numpy >= 1.8.2 scipy sympy >= 1.1.1,!= 1.11, != 1.11.1 -antlr4-python3-runtime == 4.10 +antlr4-python3-runtime == 4.12 setuptools Jinja2 >= 2.10 typing;python_version<"3.5" diff --git a/doc/running/index.rst b/doc/running/index.rst index 84431c4c5..870ccb4f4 100644 --- a/doc/running/index.rst +++ b/doc/running/index.rst @@ -13,7 +13,7 @@ Supported target platforms Currently, the following target platforms are supported for code generation. Click on each for further information: -.. grid:: 3 +.. grid:: 2 .. grid-item-card:: :text-align: center @@ -23,6 +23,16 @@ Currently, the following target platforms are supported for code generation. Cli |nest_logo| + .. 
grid-item-card:: + :text-align: center + :class-title: sd-d-flex-row sd-align-minor-center + + :doc:`NEST Simulator (compartmental) ` + + |nest_logo| + +.. grid:: 2 + + .. grid-item-card:: + :text-align: center + :class-title: sd-d-flex-row sd-align-minor-center diff --git a/doc/running/running_nest_compartmental.rst b/doc/running/running_nest_compartmental.rst new file mode 100644 index 000000000..05aba37d6 --- /dev/null +++ b/doc/running/running_nest_compartmental.rst @@ -0,0 +1,152 @@ +NEST Simulator compartmental target +################################### + +Generate code for neuron models with complex dendritic structure. + +Introduction +------------ + +NEST Simulator implements compartmental neuron models. The structure of the neuron -- soma, dendrites, axon -- is user-defined at runtime by adding compartments through ``nest.SetStatus()``. Each compartment can be assigned receptors, also through ``nest.SetStatus()``. + +The default model is passive, but sodium and potassium currents can be added by passing non-zero conductances ``g_Na`` and ``g_K`` with the parameter dictionary when adding compartments. Receptors can be AMPA and/or NMDA (excitatory), and GABA (inhibitory). Ion channel and receptor currents to the compartments can be customized through NESTML. + +For usage information and more details, see the NEST Simulator documentation on compartmental models at https://nest-simulator.readthedocs.io/en/stable/models/cm_default.html. + + +Writing a compartmental NESTML model +------------------------------------ + +Defining the membrane potential variable +---------------------------------------- + +One variable in the model represents the local membrane potential in a compartment. By default, it is called ``v_comp``. (This name is set by the ``compartmental_variable_name`` option of the compartmental code generator.) This variable must be declared as a state variable in every compartmental model so that it can be referenced in the equations describing channels and synapses. + +.. code-block:: nestml + + neuron : + state: + v_comp real = 0 # rhs value is irrelevant + + +Channel description +------------------- + +Next, define one or more channels. An ion channel is described as follows: + +.. code-block:: nestml + + neuron : + equations: + inline real = \ + \ + @mechanism::channel + +This inline expression describes the contribution of the ion channel to the compartmental current. It can reference state variables whose evolution is described by an ODE, parameters, the membrane potential and the names of other equations; the latter should be used to describe interactions between different mechanisms. + +The explicit ``@mechanism::`` descriptor gives the user a clearer overview of the NESTML code and keeps the code generation better organised. + +As an example, a Hodgkin-Huxley (HH)-type channel: + +.. code-block:: nestml + + neuron : + parameters: + gbar_Ca_HVA real = 0.00 + e_Ca_HVA real = 50.00 + + state: + v_comp real = 0.00 + + h_Ca_HVA real = 0.69823671 + m_Ca_HVA real = 0.00000918 + + equations: + inline Ca_HVA real = gbar_Ca_HVA * (h_Ca_HVA*m_Ca_HVA**2) * (e_Ca_HVA - v_comp) @mechanism::channel + m_Ca_HVA' = ( m_inf_Ca_HVA(v_comp) - m_Ca_HVA ) / (tau_m_Ca_HVA(v_comp)*1s) + h_Ca_HVA' = ( h_inf_Ca_HVA(v_comp) - h_Ca_HVA ) / (tau_h_Ca_HVA(v_comp)*1s) + + function h_inf_Ca_HVA (v_comp real) real: + ... + + function tau_h_Ca_HVA (v_comp real) real: + ... + + function m_inf_Ca_HVA (v_comp real) real: + ... 
+ + function tau_m_Ca_HVA (v_comp real) real: + ... + +All of the currents marked with ``@mechanism::channel`` are added up within a compartment. + +For a complete example, please see `cm_default.nestml `_ and its associated unit test, `compartmental_model_test.py `_. + + +Concentration description +------------------------- + +The concentration model description looks very similar: + +.. code-block:: nestml + + neuron : + equations: + ' = @mechanism::concentration + +As an example, here is a calcium concentration model, assuming that the Ca_HVA and Ca_LVAst ion channels are defined: + +.. code-block:: nestml + + neuron : + state: + c_Ca real = 0.0001 + + parameters: + gamma_Ca real = 0.04627 + tau_Ca real = 605.03 + inf_Ca real = 0.0001 + + equations: + c_Ca' = (inf_Ca - c_Ca) / (tau_Ca*1s) + (gamma_Ca * (Ca_HVA + Ca_LVAst)) / 1s @mechanism::concentration + +The only difference here is that the equation marked with the ``@mechanism::concentration`` descriptor is not an inline expression but an ODE. For an ion channel, the quantity of interest is the current, which depends on the evolution of state variables (such as gating variables in HH-type models) and on the compartment voltage; a concentration, by contrast, can be described more simply as a directly evolving state. + +For a complete example, please see `concmech.nestml `_ and its associated unit test, `compartmental_model_test.py `_. + +Synapse description +------------------- + +Here, synapse models are based on convolutions over a buffer of incoming spikes. This means that the equation for the current contribution must contain a convolve() call, together with a description of the kernel used in that convolution. The descriptor for synapses is ``@mechanism::receptor``. + +.. code-block:: nestml + + neuron : + equations: + inline real = \ + , ) call> \ + @mechanism::receptor + + # kernel(s) to be passed to the convolve call(s): + kernel = + + input: + <- spike + +For a complete example, please see `concmech.nestml `_ and its associated unit test, `compartmental_model_test.py `_. + +Mechanism interdependence +------------------------- + +The examples above already show explicit interdependence between concentration and channel models. Note that it is not necessary to describe the basic interaction that arises from each mechanism's contribution to the overall compartmental current: during a simulation step, all channel and synapse currents are added up and contribute to the change of the membrane potential (v_comp) in the next timestep. A dependence therefore only needs to be expressed explicitly if a mechanism depends on the activity of a specific channel or synapse type among several in a given compartment, or on some concentration. + + +See also +-------- + +`convert_cm_default_to_template.py `_ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This script converts the generic parts (cm_default.* and cm_tree.*) of the default compartmental model in NEST to a .jinja template. + +It is a helper tool for developers working concurrently on the compartmental models in NEST and NESTML. It should, however, be used with extreme caution, as it does not automatically update the compartmentcurrents templates. 
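Putting the pieces together, the following sketch shows how code for the compartmental target can be generated and how the resulting model can be instantiated in PyNEST. It is a minimal sketch under stated assumptions: the helper function ``generate_nest_compartmental_target``, the module and model names, and the status-dictionary keys (``compartments``, ``receptors``, ``parent_idx``, ``comp_idx``, ``receptor_type``) are illustrative; consult the generated module and ``compartmental_model_test.py`` for the exact names.

.. code-block:: python

    import nest
    # assumed frontend helper for the NEST_COMPARTMENTAL target
    from pynestml.frontend.pynestml_frontend import generate_nest_compartmental_target

    generate_nest_compartmental_target(
        input_path="tests/nest_compartmental_tests/resources/cm_default.nestml",
        target_path="/tmp/nestml-compartmental-target",
        module_name="cm_defaultmodule",   # assumed module name
        suffix="_nestml")

    nest.Install("cm_defaultmodule")
    cm = nest.Create("cm_default_nestml")  # assumed generated model name

    # build the morphology at runtime: a root (somatic) compartment and one child
    nest.SetStatus(cm, {"compartments": [{"parent_idx": -1},
                                         {"parent_idx": 0}]})

    # attach a receptor; available receptor types are defined by the NESTML model
    nest.SetStatus(cm, {"receptors": [{"comp_idx": 1, "receptor_type": "AMPA"}]})

    nest.Simulate(100.)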
diff --git a/extras/convert_cm_default_to_template.py b/extras/convert_cm_default_to_template.py new file mode 100644 index 000000000..92873d628 --- /dev/null +++ b/extras/convert_cm_default_to_template.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# +# convert_cm_default_to_template.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +""" +This script converts the generic parts (cm_default.* and cm_tree.*) of the default compartmental model in NEST to a .jinja template. + +It is a helper tool for developers working concurrently on the compartmental models in NEST and NESTML. It should however be used with extreme caution, as it doesn't automatically update the compartmentcurrents. +""" + +import os +import argparse + + +def get_replacement_patterns(): + repl_patterns = { + # include guards + 'CM_DEFAULT_H' : 'CM_{cm_unique_suffix | upper }}_H', + 'CM_TREE_H' : 'CM_TREE_{{cm_unique_suffix | upper }}_H', + # file names + 'cm_default' : '{{neuronSpecificFileNamesCmSyns[\"main\"]}}', + 'cm_tree' : '{{neuronSpecificFileNamesCmSyns[\"tree\"]}}', + 'cm_compartmentcurrents': '{{neuronSpecificFileNamesCmSyns[\"compartmentcurrents\"]}}', + # class names + 'CompTree' : 'CompTree{{cm_unique_suffix}}', + 'Compartment' : 'Compartment{{cm_unique_suffix}}', + 'CompartmentCurrents' : 'CompartmentCurrents{{cm_unique_suffix}}', + } + return repl_patterns + + +def get_trailing_characters(): + trailing_characters = [ + ' ', # declarations + '::', # function definition + '(', # constructor, destructor,... 
+ '*', # pointer declarations + '&', # references + '.h', # includes + ] + return trailing_characters + +def get_leading_characters(): + leading_characters = [ + 'class ', + ] + return leading_characters + +def get_excluded_substrings(): + excluded_substrings = { + 'UnknownCompartment': '#' + } + return excluded_substrings + + +def get_replacement_filenames(): + repl_fnames = { + 'cm_default.h': '@NEURON_NAME@.h.jinja2', + 'cm_default.cpp': '@NEURON_NAME@.cpp.jinja2', + 'cm_tree.h': 'cm_tree_@NEURON_NAME@.h.jinja2', + 'cm_tree.cpp': 'cm_tree_@NEURON_NAME@.cpp.jinja2' + } + return repl_fnames + + +def replace_with_exclusion(source_string, target_string, line): + if len([substr for substr in get_excluded_substrings() if substr in line]) > 0: + + line.replace(source_string, target_string) + + for exclstr in get_excluded_substrings(): + line.replace('#'*len(exclstr), exclstr) + + else: + line.replace(source_string, target_string) + + +def parse_command_line(): + parser = argparse.ArgumentParser() + + parser.add_argument('-s', '--source-path', dest='source_path', + action='store', type=str, + default='', + help='Path to the nest-simulator source code') + + parser.add_argument('-t', '--target-path', dest='target_path', + action='store', type=str, + default='../pynestml/codegeneration/resources_nest/cm_templates', + help='Path to the nest-simulator source code') + + return parser.parse_args() + + +def replace_in_file(source_path, target_path, source_name, target_name): + + with open(os.path.join(source_path, source_name), "rt") as fin: + with open(os.path.join(target_path, target_name), "wt") as fout: + for line in fin: + + for cm_default_str, jinja_templ_str in get_replacement_patterns().items(): + # we safeguard excluded substrings for replacement by + # temporarily changing their name into a pattern that does + # not occur in the replacement patterns + for excl_str, repl_char in get_excluded_substrings().items(): + line = line.replace(excl_str, repl_char*len(excl_str)) + + for trail_chr in get_trailing_characters(): + line = line.replace( + cm_default_str + trail_chr, + jinja_templ_str + trail_chr + ) + + for lead_chr in get_leading_characters(): + line = line.replace( + lead_chr + cm_default_str, + lead_chr + jinja_templ_str + ) + + for excl_str, repl_char in get_excluded_substrings().items(): + line = line.replace(repl_char*len(excl_str), excl_str) + + fout.write(line) + + +def convert_cm_default_to_templates(source_path, target_path): + source_path = os.path.join(source_path, "models/") + + for source_name, target_name in get_replacement_filenames().items(): + replace_in_file(source_path, target_path, source_name, target_name) + + +if __name__ == "__main__": + cl_args = parse_command_line() + convert_cm_default_to_templates(cl_args.source_path, cl_args.target_path) + diff --git a/pynestml/cocos/co_co_cm_channel_model.py b/pynestml/cocos/co_co_cm_channel_model.py new file mode 100644 index 000000000..937d0aa1a --- /dev/null +++ b/pynestml/cocos/co_co_cm_channel_model.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# +# co_co_cm_channel_model.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. 
+# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.utils.channel_processing import ChannelProcessing + + +class CoCoCmChannelModel(CoCo): + @classmethod + def check_co_co(cls, neuron: ASTNeuron): + """ + Checks if this compartmental condition applies to the handed over neuron. + If yes, it checks the presence of expected functions and declarations. + :param neuron: a single neuron instance. + :type neuron: ast_neuron + """ + return ChannelProcessing.check_co_co(neuron) diff --git a/pynestml/cocos/co_co_cm_concentration_model.py b/pynestml/cocos/co_co_cm_concentration_model.py new file mode 100644 index 000000000..ec9153ff6 --- /dev/null +++ b/pynestml/cocos/co_co_cm_concentration_model.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# +# co_co_cm_concentration_model.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.utils.concentration_processing import ConcentrationProcessing + + +class CoCoCmConcentrationModel(CoCo): + + @classmethod + def check_co_co(cls, neuron: ASTNeuron): + """ + Check if this compartmental condition applies to the handed over neuron. + If yes, it checks the presence of expected functions and declarations. + :param neuron: a single neuron instance. + :type neuron: ast_neuron + """ + return ConcentrationProcessing.check_co_co(neuron) diff --git a/pynestml/cocos/co_co_cm_synapse_model.py b/pynestml/cocos/co_co_cm_synapse_model.py new file mode 100644 index 000000000..866812916 --- /dev/null +++ b/pynestml/cocos/co_co_cm_synapse_model.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# +# co_co_cm_synapse_model.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
+ +from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.utils.synapse_processing import SynapseProcessing + + +class CoCoCmSynapseModel(CoCo): + + @classmethod + def check_co_co(cls, neuron: ASTNeuron): + """ + Checks if this compartmental condition applies to the handed over neuron. + If yes, it checks the presence of expected functions and declarations. + :param neuron: a single neuron instance. + :type neuron: ast_neuron + """ + return SynapseProcessing.check_co_co(neuron) diff --git a/pynestml/cocos/co_co_v_comp_exists.py b/pynestml/cocos/co_co_v_comp_exists.py new file mode 100644 index 000000000..881bb3d6d --- /dev/null +++ b/pynestml/cocos/co_co_v_comp_exists.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# +# co_co_v_comp_exists.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.cocos.co_co import CoCo +from pynestml.frontend.frontend_configuration import FrontendConfiguration +from pynestml.meta_model.ast_block_with_variables import ASTBlockWithVariables +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.utils.messages import Messages +from pynestml.utils.logger import Logger, LoggingLevel + + +class CoCoVCompDefined(CoCo): + """ + This class represents a constraint condition which ensures that variable v_comp has been + defined if we have compartmental model case. + When we start code generation with NEST_COMPARTMENTAL flag the following must exist: + state: + v_comp real = 0 + """ + + @classmethod + def check_co_co(cls, neuron: ASTNeuron): + """ + Checks if this coco applies for the handed over neuron. + Models which are supposed to be compartmental but do not contain + state variable called v_comp are not correct. + :param neuron: a single neuron instance. + :param after_ast_rewrite: indicates whether this coco is checked + after the code generator has done rewriting of the abstract syntax tree. + If True, checks are not as rigorous. Use False where possible. 
+ """ + from pynestml.codegeneration.nest_compartmental_code_generator import NESTCompartmentalCodeGenerator + + if not FrontendConfiguration.get_target_platform().upper() == 'NEST_COMPARTMENTAL': + return + + enforced_variable_name = NESTCompartmentalCodeGenerator._default_options["compartmental_variable_name"] + + state_blocks = neuron.get_state_blocks() + if state_blocks is None: + cls.log_error(neuron, neuron.get_source_position(), enforced_variable_name) + return False + + if isinstance(state_blocks, ASTBlockWithVariables): + state_blocks = [state_blocks] + + for state_block in state_blocks: + declarations = state_block.get_declarations() + for declaration in declarations: + variables = declaration.get_variables() + for variable in variables: + variable_name = variable.get_name().lower().strip() + if variable_name == enforced_variable_name: + return True + + cls.log_error(neuron, state_blocks[0].get_source_position(), enforced_variable_name) + return False + + @classmethod + def log_error(cls, neuron: ASTNeuron, error_position, missing_variable_name): + code, message = Messages.get_v_comp_variable_value_missing(neuron.get_name(), missing_variable_name) + Logger.log_message(error_position=error_position, node=neuron, log_level=LoggingLevel.ERROR, code=code, message=message) diff --git a/pynestml/cocos/co_cos_manager.py b/pynestml/cocos/co_cos_manager.py index 256138044..ffdff65ef 100644 --- a/pynestml/cocos/co_cos_manager.py +++ b/pynestml/cocos/co_cos_manager.py @@ -25,6 +25,7 @@ from pynestml.cocos.co_co_all_variables_defined import CoCoAllVariablesDefined from pynestml.cocos.co_co_inline_expression_not_assigned_to import CoCoInlineExpressionNotAssignedTo from pynestml.cocos.co_co_input_port_not_assigned_to import CoCoInputPortNotAssignedTo +from pynestml.cocos.co_co_cm_channel_model import CoCoCmChannelModel from pynestml.cocos.co_co_convolve_cond_correctly_built import CoCoConvolveCondCorrectlyBuilt from pynestml.cocos.co_co_correct_numerator_of_unit import CoCoCorrectNumeratorOfUnit from pynestml.cocos.co_co_correct_order_in_equation import CoCoCorrectOrderInEquation @@ -51,8 +52,11 @@ from pynestml.cocos.co_co_resolution_func_legally_used import CoCoResolutionFuncLegallyUsed from pynestml.cocos.co_co_state_variables_initialized import CoCoStateVariablesInitialized from pynestml.cocos.co_co_sum_has_correct_parameter import CoCoSumHasCorrectParameter +from pynestml.cocos.co_co_cm_synapse_model import CoCoCmSynapseModel +from pynestml.cocos.co_co_cm_concentration_model import CoCoCmConcentrationModel from pynestml.cocos.co_co_input_port_qualifier_unique import CoCoInputPortQualifierUnique from pynestml.cocos.co_co_user_defined_function_correctly_defined import CoCoUserDefinedFunctionCorrectlyDefined +from pynestml.cocos.co_co_v_comp_exists import CoCoVCompDefined from pynestml.cocos.co_co_variable_once_per_scope import CoCoVariableOncePerScope from pynestml.cocos.co_co_vector_declaration_right_size import CoCoVectorDeclarationRightSize from pynestml.cocos.co_co_vector_input_port_correct_size_type import CoCoVectorInputPortsCorrectSizeType @@ -60,6 +64,7 @@ from pynestml.cocos.co_co_vector_variable_in_non_vector_declaration import CoCoVectorVariableInNonVectorDeclaration from pynestml.cocos.co_co_function_argument_template_types_consistent import CoCoFunctionArgumentTemplateTypesConsistent from pynestml.cocos.co_co_priorities_correctly_specified import CoCoPrioritiesCorrectlySpecified +from pynestml.frontend.frontend_configuration import FrontendConfiguration from 
pynestml.meta_model.ast_neuron import ASTNeuron from pynestml.meta_model.ast_synapse import ASTSynapse @@ -124,6 +129,26 @@ def check_variables_defined_before_usage(cls, neuron: ASTNeuron, after_ast_rewri """ CoCoAllVariablesDefined.check_co_co(neuron, after_ast_rewrite) + @classmethod + def check_v_comp_requirement(cls, neuron: ASTNeuron): + """ + In compartmental case, checks if v_comp variable was defined + :param neuron: a single neuron object + """ + CoCoVCompDefined.check_co_co(neuron) + + @classmethod + def check_compartmental_model(cls, neuron: ASTNeuron) -> None: + """ + collects all relevant information for the different compartmental mechanism classes for later code-generation + + searches for inlines or odes with decorator @mechanism:: and performs a base and, depending on type, + specific information collection process. See nestml documentation on compartmental code generation. + """ + CoCoCmChannelModel.check_co_co(neuron) + CoCoCmConcentrationModel.check_co_co(neuron) + CoCoCmSynapseModel.check_co_co(neuron) + @classmethod def check_inline_expressions_have_rhs(cls, neuron: ASTNeuron): """ @@ -380,6 +405,10 @@ def post_symbol_table_builder_checks(cls, neuron: ASTNeuron, after_ast_rewrite: cls.check_variables_unique_in_scope(neuron) cls.check_state_variables_initialized(neuron) cls.check_variables_defined_before_usage(neuron, after_ast_rewrite) + if FrontendConfiguration.get_target_platform().upper() == 'NEST_COMPARTMENTAL': + # XXX: TODO: refactor this out; define a ``cocos_from_target_name()`` in the frontend instead. + cls.check_v_comp_requirement(neuron) + cls.check_compartmental_model(neuron) cls.check_inline_expressions_have_rhs(neuron) cls.check_inline_has_max_one_lhs(neuron) cls.check_input_ports_not_assigned_to(neuron) @@ -397,9 +426,11 @@ def post_symbol_table_builder_checks(cls, neuron: ASTNeuron, after_ast_rewrite: if not after_ast_rewrite: # units might be incorrect due to e.g. refactoring convolve call (Real type assigned) cls.check_odes_have_consistent_units(neuron) - cls.check_ode_functions_have_consistent_units(neuron) # ODE functions have been removed at this point + # ODE functions have been removed at this point + cls.check_ode_functions_have_consistent_units(neuron) cls.check_correct_usage_of_kernels(neuron) - cls.check_integrate_odes_called_if_equations_defined(neuron) + if FrontendConfiguration.get_target_platform().upper() != 'NEST_COMPARTMENTAL': + cls.check_integrate_odes_called_if_equations_defined(neuron) cls.check_invariant_type_correct(neuron) cls.check_vector_in_non_vector_declaration_detected(neuron) cls.check_sum_has_correct_parameter(neuron) diff --git a/pynestml/codegeneration/nest_code_generator_utils.py b/pynestml/codegeneration/nest_code_generator_utils.py index 0787edc61..66a2c29b7 100644 --- a/pynestml/codegeneration/nest_code_generator_utils.py +++ b/pynestml/codegeneration/nest_code_generator_utils.py @@ -24,7 +24,6 @@ import re import uuid -from pynestml.frontend.pynestml_frontend import generate_nest_target from pynestml.meta_model.ast_variable import ASTVariable from pynestml.symbols.variable_symbol import BlockType from pynestml.symbols.variable_symbol import VariableSymbol @@ -76,6 +75,8 @@ def generate_code_for(cls, If a synapse is specified, returns a tuple (module_name, mangled_neuron_name, mangled_synapse_name) containing the names that can be used in ``nest.Install()``, ``nest.Create()`` and ``nest.Connect()`` calls. If no synapse is specified, returns a tuple (module_name, mangled_neuron_name). 
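        A minimal usage sketch (the model strings and keyword arguments shown are illustrative assumptions, not part of this patch):

            module_name, neuron_name, synapse_name = NESTCodeGeneratorUtils.generate_code_for(
                nestml_neuron_model=neuron_nestml_str,     # NESTML neuron model code as a string (assumed)
                nestml_synapse_model=synapse_nestml_str)   # optional NESTML synapse model code as a string

            nest.Install(module_name)
            pre = nest.Create(neuron_name)
            post = nest.Create(neuron_name)
            nest.Connect(pre, post, syn_spec={"synapse_model": synapse_name})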
""" + from pynestml.frontend.pynestml_frontend import generate_nest_target + # generate unique ID if uniq_id is None: uniq_id = str(uuid.uuid4().hex) diff --git a/pynestml/codegeneration/nest_compartmental_code_generator.py b/pynestml/codegeneration/nest_compartmental_code_generator.py new file mode 100644 index 000000000..0318b84fb --- /dev/null +++ b/pynestml/codegeneration/nest_compartmental_code_generator.py @@ -0,0 +1,941 @@ +# -*- coding: utf-8 -*- +# +# nest_compartmental_code_generator.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from typing import Any, Dict, List, Mapping, Optional + +import datetime +import os + +from jinja2 import Environment, FileSystemLoader, TemplateRuntimeError, Template +import pynestml +from pynestml.codegeneration.code_generator import CodeGenerator +from pynestml.codegeneration.nest_assignments_helper import NestAssignmentsHelper +from pynestml.codegeneration.nest_declarations_helper import NestDeclarationsHelper +from pynestml.codegeneration.printers.constant_printer import ConstantPrinter +from pynestml.codegeneration.printers.cpp_expression_printer import CppExpressionPrinter +from pynestml.codegeneration.printers.cpp_printer import CppPrinter +from pynestml.codegeneration.printers.cpp_simple_expression_printer import CppSimpleExpressionPrinter +from pynestml.codegeneration.printers.gsl_variable_printer import GSLVariablePrinter +from pynestml.codegeneration.printers.nest_cpp_function_call_printer import NESTCppFunctionCallPrinter +from pynestml.codegeneration.printers.nest_cpp_type_symbol_printer import NESTCppTypeSymbolPrinter +from pynestml.codegeneration.printers.nest_gsl_function_call_printer import NESTGSLFunctionCallPrinter +from pynestml.codegeneration.printers.nest_variable_printer import NESTVariablePrinter +from pynestml.codegeneration.printers.nestml_printer import NESTMLPrinter +from pynestml.codegeneration.printers.ode_toolbox_expression_printer import ODEToolboxExpressionPrinter +from pynestml.codegeneration.printers.ode_toolbox_function_call_printer import ODEToolboxFunctionCallPrinter +from pynestml.codegeneration.printers.ode_toolbox_variable_printer import ODEToolboxVariablePrinter +from pynestml.codegeneration.printers.unitless_cpp_simple_expression_printer import UnitlessCppSimpleExpressionPrinter +from pynestml.frontend.frontend_configuration import FrontendConfiguration +from pynestml.meta_model.ast_assignment import ASTAssignment +from pynestml.meta_model.ast_block_with_variables import ASTBlockWithVariables +from pynestml.meta_model.ast_input_port import ASTInputPort +from pynestml.meta_model.ast_kernel import ASTKernel +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.meta_model.ast_node_factory import ASTNodeFactory +from pynestml.meta_model.ast_synapse import ASTSynapse +from pynestml.meta_model.ast_variable import ASTVariable +from pynestml.symbol_table.symbol_table import SymbolTable 
+from pynestml.symbols.symbol import SymbolKind +from pynestml.utils.mechanism_processing import MechanismProcessing +from pynestml.utils.channel_processing import ChannelProcessing +from pynestml.utils.concentration_processing import ConcentrationProcessing +from pynestml.utils.conc_info_enricher import ConcInfoEnricher +from pynestml.utils.ast_utils import ASTUtils +from pynestml.utils.chan_info_enricher import ChanInfoEnricher +from pynestml.utils.logger import Logger +from pynestml.utils.logger import LoggingLevel +from pynestml.utils.messages import Messages +from pynestml.utils.model_parser import ModelParser +from pynestml.utils.syns_info_enricher import SynsInfoEnricher +from pynestml.utils.synapse_processing import SynapseProcessing +from pynestml.visitors.ast_random_number_generator_visitor import ASTRandomNumberGeneratorVisitor +from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor +from odetoolbox import analysis + + +class NESTCompartmentalCodeGenerator(CodeGenerator): + r""" + Code generator for a C++ NEST extension module. + + Options: + - **neuron_parent_class**: The C++ class from which the generated NESTML neuron class inherits. Examples: ``"ArchivingNode"``, ``"StructuralPlasticityNode"``. Default: ``"ArchivingNode"``. + - **neuron_parent_class_include**: The C++ header filename to include that contains **neuron_parent_class**. Default: ``"archiving_node.h"``. + - **preserve_expressions**: Set to True, or a list of strings corresponding to individual variable names, to disable internal rewriting of expressions, and return same output as input expression where possible. Only applies to variables specified as first-order differential equations. (This parameter is passed to ODE-toolbox.) + - **simplify_expression**: For all expressions ``expr`` that are rewritten by ODE-toolbox: the contents of this parameter string are ``eval()``ed in Python to obtain the final output expression. Override for custom expression simplification steps. Example: ``sympy.simplify(expr)``. Default: ``"sympy.logcombine(sympy.powsimp(sympy.expand(expr)))"``. (This parameter is passed to ODE-toolbox.) + - **templates**: Path containing jinja templates used to generate code for NEST simulator. + - **path**: Path containing jinja templates used to generate code for NEST simulator. + - **model_templates**: A list of the jinja templates or a relative path to a directory containing the templates related to the neuron model(s). + - **module_templates**: A list of the jinja templates or a relative path to a directory containing the templates related to generating the NEST module. + - **nest_version**: A string identifying the version of NEST Simulator to generate code for. The string corresponds to the NEST Simulator git repository tag or git branch name, for instance, ``"v2.20.2"`` or ``"master"``. The default is the empty string, which causes the NEST version to be automatically identified from the ``nest`` Python module. 
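    A hedged sketch of overriding these options when constructing the generator directly (the ``options`` mapping is the constructor argument defined below; the keys are the options listed above):

        codegen = NESTCompartmentalCodeGenerator(options={
            "preserve_expressions": False,            # allow ODE-toolbox to rewrite expressions
            "compartmental_variable_name": "v_comp",  # name of the per-compartment membrane potential
            "nest_version": "master"})                # skip auto-detection of the installed NEST version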
+ """ + + _default_options = { + "neuron_parent_class": "ArchivingNode", + "neuron_parent_class_include": "archiving_node.h", + "preserve_expressions": True, + "simplify_expression": "sympy.logcombine(sympy.powsimp(sympy.expand(expr)))", + "templates": { + "path": "resources_nest_compartmental/cm_neuron", + "model_templates": { + "neuron": [ + "cm_compartmentcurrents_@NEURON_NAME@.cpp.jinja2", + "cm_compartmentcurrents_@NEURON_NAME@.h.jinja2", + "@NEURON_NAME@.cpp.jinja2", + "@NEURON_NAME@.h.jinja2", + "cm_tree_@NEURON_NAME@.cpp.jinja2", + "cm_tree_@NEURON_NAME@.h.jinja2"]}, + "module_templates": ["setup"]}, + "nest_version": "", + "compartmental_variable_name": "v_comp"} + + _variable_matching_template = r"(\b)({})(\b)" + _model_templates = dict() + _module_templates = list() + + def __init__(self, options: Optional[Mapping[str, Any]] = None): + super().__init__("NEST_COMPARTMENTAL", options) + + # auto-detect NEST Simulator installed version + if not self.option_exists("nest_version") or not self.get_option("nest_version"): + from pynestml.codegeneration.nest_tools import NESTTools + nest_version = NESTTools.detect_nest_version() + self.set_options({"nest_version": nest_version}) + + self.analytic_solver = {} + self.numeric_solver = {} + # those state variables not defined as an ODE in the equations block + self.non_equations_state_variables = {} + + self.setup_template_env() + + self.setup_printers() + + # maps kernel names to their analytic solutions separately + # this is needed for the cm_syns case + self.kernel_name_to_analytic_solver = {} + + def setup_printers(self): + self._constant_printer = ConstantPrinter() + + # C++/NEST API printers + self._type_symbol_printer = NESTCppTypeSymbolPrinter() + self._nest_variable_printer = NESTVariablePrinter(expression_printer=None, with_origin=True, + with_vector_parameter=True) + self._nest_function_call_printer = NESTCppFunctionCallPrinter(None) + self._nest_function_call_printer_no_origin = NESTCppFunctionCallPrinter(None) + + self._printer = CppExpressionPrinter( + simple_expression_printer=CppSimpleExpressionPrinter(variable_printer=self._nest_variable_printer, + constant_printer=self._constant_printer, + function_call_printer=self._nest_function_call_printer)) + self._nest_variable_printer._expression_printer = self._printer + self._nest_function_call_printer._expression_printer = self._printer + self._nest_printer = CppPrinter(expression_printer=self._printer) + + self._nest_variable_printer_no_origin = NESTVariablePrinter(None, with_origin=False, + with_vector_parameter=False, + enforce_getter=False) + self._printer_no_origin = CppExpressionPrinter( + simple_expression_printer=CppSimpleExpressionPrinter(variable_printer=self._nest_variable_printer_no_origin, + constant_printer=self._constant_printer, + function_call_printer=self._nest_function_call_printer_no_origin)) + self._nest_variable_printer_no_origin._expression_printer = self._printer_no_origin + self._nest_function_call_printer_no_origin._expression_printer = self._printer_no_origin + + # GSL printers + self._gsl_variable_printer = GSLVariablePrinter(None) + self._gsl_function_call_printer = NESTGSLFunctionCallPrinter(None) + + self._gsl_printer = CppExpressionPrinter( + simple_expression_printer=UnitlessCppSimpleExpressionPrinter(variable_printer=self._gsl_variable_printer, + constant_printer=self._constant_printer, + function_call_printer=self._gsl_function_call_printer)) + self._gsl_function_call_printer._expression_printer = self._gsl_printer + + # ODE-toolbox printers + 
self._ode_toolbox_variable_printer = ODEToolboxVariablePrinter(None) + self._ode_toolbox_function_call_printer = ODEToolboxFunctionCallPrinter(None) + self._ode_toolbox_printer = ODEToolboxExpressionPrinter( + simple_expression_printer=UnitlessCppSimpleExpressionPrinter( + variable_printer=self._ode_toolbox_variable_printer, + constant_printer=self._constant_printer, + function_call_printer=self._ode_toolbox_function_call_printer)) + self._ode_toolbox_variable_printer._expression_printer = self._ode_toolbox_printer + self._ode_toolbox_function_call_printer._expression_printer = self._ode_toolbox_printer + + def raise_helper(self, msg): + raise TemplateRuntimeError(msg) + + def set_options(self, options: Mapping[str, Any]) -> Mapping[str, Any]: + ret = super().set_options(options) + self.setup_template_env() + + return ret + + def generate_code( + self, + neurons: List[ASTNeuron], + synapses: List[ASTSynapse] = None) -> None: + self.analyse_transform_neurons(neurons) + self.generate_neurons(neurons) + self.generate_module_code(neurons) + + def generate_module_code(self, neurons: List[ASTNeuron]) -> None: + """t + Generates code that is necessary to integrate neuron models into the NEST infrastructure. + :param neurons: a list of neurons + :type neurons: list(ASTNeuron) + """ + namespace = self._get_module_namespace(neurons) + if not os.path.exists(FrontendConfiguration.get_target_path()): + os.makedirs(FrontendConfiguration.get_target_path()) + + for _module_templ in self._module_templates: + file_name_parts = os.path.basename( + _module_templ.filename).split(".") + assert len( + file_name_parts) >= 3, "Template file name should be in the format: ``..jinja2``" + file_extension = file_name_parts[-2] + if file_extension in ["cpp", "h"]: + filename = FrontendConfiguration.get_module_name() + else: + filename = file_name_parts[0] + + file_path = str(os.path.join( + FrontendConfiguration.get_target_path(), filename)) + with open(file_path + "." 
+ file_extension, "w+") as f: + f.write(str(_module_templ.render(namespace))) + + code, message = Messages.get_module_generated( + FrontendConfiguration.get_target_path()) + Logger.log_message(None, code, message, None, LoggingLevel.INFO) + + def _get_module_namespace(self, neurons: List[ASTNeuron]) -> Dict: + """ + Creates a namespace for generating NEST extension module code + :param neurons: List of neurons + :return: a context dictionary for rendering templates + """ + namespace = {"neurons": neurons, + "moduleName": FrontendConfiguration.get_module_name(), + "now": datetime.datetime.utcnow()} + + # auto-detect NEST Simulator installed version + if not self.option_exists("nest_version") or not self.get_option("nest_version"): + from pynestml.codegeneration.nest_tools import NESTTools + nest_version = NESTTools.detect_nest_version() + self.set_options({"nest_version": nest_version}) + + # neuron specific file names in compartmental case + neuron_name_to_filename = dict() + for neuron in neurons: + neuron_name_to_filename[neuron.get_name()] = { + "compartmentcurrents": self.get_cm_syns_compartmentcurrents_file_prefix(neuron), + "main": self.get_cm_syns_main_file_prefix(neuron), + "tree": self.get_cm_syns_tree_file_prefix(neuron) + } + namespace["perNeuronFileNamesCm"] = neuron_name_to_filename + + # compartmental case files that are not neuron specific - currently + # empty + namespace["sharedFileNamesCmSyns"] = { + } + + return namespace + + def get_cm_syns_compartmentcurrents_file_prefix(self, neuron): + return "cm_compartmentcurrents_" + neuron.get_name() + + def get_cm_syns_main_file_prefix(self, neuron): + return neuron.get_name() + + def get_cm_syns_tree_file_prefix(self, neuron): + return "cm_tree_" + neuron.get_name() + + def analyse_transform_neurons(self, neurons: List[ASTNeuron]) -> None: + """ + Analyse and transform a list of neurons. + :param neurons: a list of neurons. 
+ """ + for neuron in neurons: + code, message = Messages.get_analysing_transforming_neuron( + neuron.get_name()) + Logger.log_message(None, code, message, None, LoggingLevel.INFO) + spike_updates = self.analyse_neuron(neuron) + neuron.spike_updates = spike_updates + + def create_ode_indict(self, + neuron: ASTNeuron, + parameters_block: ASTBlockWithVariables, + kernel_buffers: Mapping[ASTKernel, + ASTInputPort]): + odetoolbox_indict = self.transform_ode_and_kernels_to_json( + neuron, parameters_block, kernel_buffers) + odetoolbox_indict["options"] = {} + odetoolbox_indict["options"]["output_timestep_symbol"] = "__h" + return odetoolbox_indict + + def ode_solve_analytically(self, + neuron: ASTNeuron, + parameters_block: ASTBlockWithVariables, + kernel_buffers: Mapping[ASTKernel, + ASTInputPort]): + odetoolbox_indict = self.create_ode_indict( + neuron, parameters_block, kernel_buffers) + + full_solver_result = analysis( + odetoolbox_indict, + disable_stiffness_check=True, + preserve_expressions=self.get_option("preserve_expressions"), + simplify_expression=self.get_option("simplify_expression"), + log_level=FrontendConfiguration.logging_level) + + analytic_solver = None + analytic_solvers = [ + x for x in full_solver_result if x["solver"] == "analytical"] + assert len( + analytic_solvers) <= 1, "More than one analytic solver not presently supported" + if len(analytic_solvers) > 0: + analytic_solver = analytic_solvers[0] + + return full_solver_result, analytic_solver + + def ode_toolbox_analysis(self, neuron: ASTNeuron, + kernel_buffers: Mapping[ASTKernel, ASTInputPort]): + """ + Prepare data for ODE-toolbox input format, invoke ODE-toolbox analysis via its API, and return the output. + """ + assert len(neuron.get_equations_blocks()) == 1, "Only one equations block supported for now" + assert len(neuron.get_parameters_blocks()) == 1, "Only one parameters block supported for now" + + equations_block = neuron.get_equations_blocks()[0] + + if len(equations_block.get_kernels()) == 0 and len( + equations_block.get_ode_equations()) == 0: + # no equations defined -> no changes to the neuron + return None, None + + parameters_block = neuron.get_parameters_blocks()[0] + + solver_result, analytic_solver = self.ode_solve_analytically( + neuron, parameters_block, kernel_buffers) + + # if numeric solver is required, generate a stepping function that + # includes each state variable + numeric_solver = None + numeric_solvers = [ + x for x in solver_result if x["solver"].startswith("numeric")] + + if numeric_solvers: + odetoolbox_indict = self.create_ode_indict( + neuron, parameters_block, kernel_buffers) + solver_result = analysis( + odetoolbox_indict, + disable_stiffness_check=True, + disable_analytic_solver=True, + preserve_expressions=self.get_option("preserve_expressions"), + simplify_expression=self.get_option("simplify_expression"), + log_level=FrontendConfiguration.logging_level) + numeric_solvers = [ + x for x in solver_result if x["solver"].startswith("numeric")] + assert len( + numeric_solvers) <= 1, "More than one numeric solver not presently supported" + if len(numeric_solvers) > 0: + numeric_solver = numeric_solvers[0] + + return analytic_solver, numeric_solver + + def find_non_equations_state_variables(self, neuron: ASTNeuron): + assert len(neuron.get_state_blocks()) == 1, "Only one state block supported for now" + assert len(neuron.get_equations_blocks()) == 1, "Only one equations block supported for now" + + non_equations_state_variables = [] + for decl in 
neuron.get_state_blocks()[0].get_declarations(): + for var in decl.get_variables(): + # check if this variable is not in equations + + # if there is no equations, all variables are not in equations + if not neuron.get_equations_blocks(): + non_equations_state_variables.append(var) + continue + + # check if equation name is also a state variable + used_in_eq = False + for ode_eq in neuron.get_equations_blocks()[0].get_ode_equations(): + if ode_eq.get_lhs().get_name() == var.get_name(): + used_in_eq = True + break + + # check for any state variables being used by a kernel + for kern in neuron.get_equations_blocks()[0].get_kernels(): + for kern_var in kern.get_variables(): + if kern_var.get_name() == var.get_name(): + used_in_eq = True + break + + # if no usage found at this point, we have a non-equation state + # variable + if not used_in_eq: + non_equations_state_variables.append(var) + return non_equations_state_variables + + def analyse_neuron(self, neuron: ASTNeuron) -> List[ASTAssignment]: + """ + Analyse and transform a single neuron. + :param neuron: a single neuron. + :return: spike_updates: list of spike updates, see documentation for get_spike_update_expressions() for more information. + """ + code, message = Messages.get_start_processing_model(neuron.get_name()) + Logger.log_message(neuron, code, message, + neuron.get_source_position(), LoggingLevel.INFO) + + assert len(neuron.get_equations_blocks()) == 1, "Only one equations block supported for now" + assert len(neuron.get_state_blocks()) == 1, "Only one state block supported for now" + + equations_block = neuron.get_equations_blocks()[0] + + if equations_block is None: + # add all declared state variables as none of them are used in + # equations block + self.non_equations_state_variables[neuron.get_name()] = [] + self.non_equations_state_variables[neuron.get_name()].extend( + ASTUtils.all_variables_defined_in_block(neuron.get_state_blocks()[0])) + + return [] + + # goes through all convolve() inside ode's from equations block + # if they have delta kernels, use sympy to expand the expression, then + # find the convolve calls and replace them with constant value 1 + # then return every subexpression that had that convolve() replaced + delta_factors = ASTUtils.get_delta_factors_(neuron, equations_block) + + # goes through all convolve() inside equations block + # extracts what kernel is paired with what spike buffer + # returns pairs (kernel, spike_buffer) + kernel_buffers = ASTUtils.generate_kernel_buffers_( + neuron, equations_block) + + # replace convolve(g_E, spikes_exc) with g_E__X__spikes_exc[__d] + # done by searching for every ASTSimpleExpression inside equations_block + # which is a convolve call and substituting that call with + # newly created ASTVariable kernel__X__spike_buffer + ASTUtils.replace_convolve_calls_with_buffers_(neuron, equations_block) + + # substitute inline expressions with each other + # such that no inline expression references another inline expression + ASTUtils.make_inline_expressions_self_contained( + equations_block.get_inline_expressions()) + + # dereference inline_expressions inside ode equations + ASTUtils.replace_inline_expressions_through_defining_expressions( + equations_block.get_ode_equations(), equations_block.get_inline_expressions()) + + # generate update expressions using ode toolbox + # for each equation in the equation block attempt to solve analytically + # then attempt to solve numerically + # "update_expressions" key in those solvers contains a mapping + # {expression1: 
update_expression1, expression2: update_expression2} + + analytic_solver, numeric_solver = self.ode_toolbox_analysis( + neuron, kernel_buffers) + + """ + # separate analytic solutions by kernel + # this is needed for the synaptic case + self.kernel_name_to_analytic_solver[neuron.get_name( + )] = self.ode_toolbox_anaysis_cm_syns(neuron, kernel_buffers) + """ + + self.analytic_solver[neuron.get_name()] = analytic_solver + self.numeric_solver[neuron.get_name()] = numeric_solver + + # get all variables from state block that are not found in equations + self.non_equations_state_variables[neuron.get_name()] = \ + self.find_non_equations_state_variables(neuron) + + # gather all variables used by kernels and delete their declarations + # they will be inserted later again, but this time with values redefined + # by odetoolbox, higher order variables don't get deleted here + ASTUtils.remove_initial_values_for_kernels(neuron) + + # delete all kernels as they are all converted into buffers + # and corresponding update formulas calculated by odetoolbox + # Remember them in a variable though + kernels = ASTUtils.remove_kernel_definitions_from_equations_block( + neuron) + + # Every ODE variable (a variable of order > 0) is renamed according to ODE-toolbox conventions + # and their initial values are replaced by expressions suggested by ODE-toolbox. + # Differential order can now be set to 0, because they can directly represent the value of the derivative now. + # initial value can be the same value as the originally stated one but + # it doesn't have to be + ASTUtils.update_initial_values_for_odes( + neuron, [analytic_solver, numeric_solver]) + + # remove differential equations from equations block + # those are now resolved into zero order variables and their + # corresponding updates + ASTUtils.remove_ode_definitions_from_equations_block(neuron) + + # restore state variables that were referenced by kernels + # and set their initial values by those suggested by ODE-toolbox + ASTUtils.create_initial_values_for_kernels( + neuron, [analytic_solver, numeric_solver], kernels) + + # Inside all remaining expressions, translate all remaining variable names + # according to the naming conventions of ODE-toolbox.
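+ # (illustration, hypothetical names: after this step a state variable such as "g_ex'" is referred to as "g_ex__d", and a convolution state as "g_ex__X__spikes_exc")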
+ ASTUtils.replace_variable_names_in_expressions( + neuron, [analytic_solver, numeric_solver]) + + # find all inline kernels defined as ASTSimpleExpression + # that have a single kernel convolution aliasing variable ('__X__') + # translate all remaining variable names according to the naming + # conventions of ODE-toolbox + ASTUtils.replace_convolution_aliasing_inlines(neuron) + + # add variable __h to internals block + ASTUtils.add_timestep_symbol(neuron) + + # add propagator variables calculated by odetoolbox into internal blocks + if self.analytic_solver[neuron.get_name()] is not None: + neuron = ASTUtils.add_declarations_to_internals( + neuron, self.analytic_solver[neuron.get_name()]["propagators"]) + + # generate how to calculate the next spike update + self.update_symbol_table(neuron, kernel_buffers) + # find any spike update expressions defined by the user + spike_updates = self.get_spike_update_expressions( + neuron, kernel_buffers, [analytic_solver, numeric_solver], delta_factors) + + return spike_updates + + def compute_name_of_generated_file(self, jinja_file_name, neuron): + file_name_no_extension = os.path.basename( + jinja_file_name).split(".")[0] + + file_name_calculators = { + "CompartmentCurrents": self.get_cm_syns_compartmentcurrents_file_prefix, + "Tree": self.get_cm_syns_tree_file_prefix, + "Main": self.get_cm_syns_main_file_prefix, + } + + def compute_prefix(file_name): + for indication, file_prefix_calculator in file_name_calculators.items(): + if file_name.lower().startswith(indication.lower()): + return file_prefix_calculator(neuron) + return file_name_no_extension.lower() + "_" + neuron.get_name() + + file_extension = "" + if file_name_no_extension.lower().endswith("class"): + file_extension = "cpp" + elif file_name_no_extension.lower().endswith("header"): + file_extension = "h" + else: + file_extension = "unknown" + + return str( + os.path.join( + FrontendConfiguration.get_target_path(), + compute_prefix(file_name_no_extension))) + "." + file_extension + + def getUniqueSuffix(self, neuron: ASTNeuron) -> str: + ret = neuron.get_name().capitalize() + underscore_pos = ret.find("_") + while underscore_pos != -1: + ret = ret[:underscore_pos] + ret[underscore_pos + 1:].capitalize() + underscore_pos = ret.find("_") + return ret + + def _get_neuron_model_namespace(self, neuron: ASTNeuron) -> Dict: + """ + Returns a standard namespace for generating neuron code for NEST + :param neuron: a single neuron instance + :return: a context dictionary for rendering templates + :rtype: dict + """ + + namespace = {} + + namespace["now"] = datetime.datetime.utcnow() + namespace["tracing"] = FrontendConfiguration.is_dev + + # helper functions + namespace["ast_node_factory"] = ASTNodeFactory + namespace["assignments"] = NestAssignmentsHelper() + namespace["utils"] = ASTUtils + namespace["declarations"] = NestDeclarationsHelper(self._type_symbol_printer) + + # using random number generators? 
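+ # (the visitor below records whether the model draws normally distributed random numbers; the resulting "norm_rng" namespace flag lets the templates condition RNG-related code on it)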
+ rng_visitor = ASTRandomNumberGeneratorVisitor() + neuron.accept(rng_visitor) + namespace["norm_rng"] = rng_visitor._norm_rng_is_used + + # printers + namespace["printer"] = self._nest_printer + namespace["printer_no_origin"] = self._printer_no_origin + namespace["gsl_printer"] = self._gsl_printer + namespace["nest_printer"] = self._nest_printer + namespace["nestml_printer"] = NESTMLPrinter() + namespace["type_symbol_printer"] = self._type_symbol_printer + + # NESTML syntax keywords + namespace["PyNestMLLexer"] = {} + from pynestml.generated.PyNestMLLexer import PyNestMLLexer + for kw in dir(PyNestMLLexer): + if kw.isupper(): + namespace["PyNestMLLexer"][kw] = eval("PyNestMLLexer." + kw) + + namespace["nest_version"] = self.get_option("nest_version") + + namespace["neuronName"] = neuron.get_name() + namespace["neuron"] = neuron + namespace["moduleName"] = FrontendConfiguration.get_module_name() + namespace["has_spike_input"] = ASTUtils.has_spike_input( + neuron.get_body()) + namespace["has_continuous_input"] = ASTUtils.has_continuous_input( + neuron.get_body()) + + namespace["neuron_parent_class"] = self.get_option( + "neuron_parent_class") + namespace["neuron_parent_class_include"] = self.get_option( + "neuron_parent_class_include") + + namespace["PredefinedUnits"] = pynestml.symbols.predefined_units.PredefinedUnits + namespace["UnitTypeSymbol"] = pynestml.symbols.unit_type_symbol.UnitTypeSymbol + namespace["SymbolKind"] = pynestml.symbols.symbol.SymbolKind + + namespace["initial_values"] = {} + namespace["uses_analytic_solver"] = neuron.get_name() in self.analytic_solver.keys( + ) and self.analytic_solver[neuron.get_name()] is not None + if namespace["uses_analytic_solver"]: + namespace["analytic_state_variables"] = self.analytic_solver[neuron.get_name( + )]["state_variables"] + namespace["analytic_variable_symbols"] = { + sym: neuron.get_equations_blocks()[0].get_scope().resolve_to_symbol( + sym, SymbolKind.VARIABLE) for sym in namespace["analytic_state_variables"]} + namespace["update_expressions"] = {} + for sym, expr in self.analytic_solver[neuron.get_name( + )]["initial_values"].items(): + namespace["initial_values"][sym] = expr + for sym in namespace["analytic_state_variables"]: + expr_str = self.analytic_solver[neuron.get_name( + )]["update_expressions"][sym] + expr_ast = ModelParser.parse_expression(expr_str) + # pretend that update expressions are in "equations" block, + # which should always be present, as differential equations + # must have been defined to get here + expr_ast.update_scope( + neuron.get_equations_blocks()[0].get_scope()) + expr_ast.accept(ASTSymbolTableVisitor()) + namespace["update_expressions"][sym] = expr_ast + + namespace["propagators"] = self.analytic_solver[neuron.get_name()]["propagators"] + + # convert variables from ASTVariable instances to strings + _names = self.non_equations_state_variables[neuron.get_name()] + _names = [ASTUtils.to_ode_toolbox_processed_name( + var.get_complete_name()) for var in _names] + namespace["non_equations_state_variables"] = _names + + namespace["uses_numeric_solver"] = neuron.get_name() in self.numeric_solver.keys( + ) and self.numeric_solver[neuron.get_name()] is not None + if namespace["uses_numeric_solver"]: + namespace["numeric_state_variables"] = self.numeric_solver[neuron.get_name( + )]["state_variables"] + namespace["numeric_variable_symbols"] = { + sym: neuron.get_equations_blocks()[0].get_scope().resolve_to_symbol( + sym, SymbolKind.VARIABLE) for sym in namespace["numeric_state_variables"]} + assert not any( + 
[sym is None for sym in namespace["numeric_variable_symbols"].values()]) + namespace["numeric_update_expressions"] = {} + for sym, expr in self.numeric_solver[neuron.get_name( + )]["initial_values"].items(): + namespace["initial_values"][sym] = expr + for sym in namespace["numeric_state_variables"]: + expr_str = self.numeric_solver[neuron.get_name( + )]["update_expressions"][sym] + expr_ast = ModelParser.parse_expression(expr_str) + # pretend that update expressions are in "equations" block, + # which should always be present, as differential equations + # must have been defined to get here + expr_ast.update_scope( + neuron.get_equations_blocks()[0].get_scope()) + expr_ast.accept(ASTSymbolTableVisitor()) + namespace["numeric_update_expressions"][sym] = expr_ast + + namespace["spike_updates"] = neuron.spike_updates + + namespace["recordable_state_variables"] = [ + sym for sym in neuron.get_state_symbols() if namespace["declarations"].get_domain_from_type( + sym.get_type_symbol()) == "double" and sym.is_recordable and not ASTUtils.is_delta_kernel( + neuron.get_kernel_by_name( + sym.name))] + namespace["recordable_inline_expressions"] = [ + sym for sym in neuron.get_inline_expression_symbols() if namespace["declarations"].get_domain_from_type( + sym.get_type_symbol()) == "double" and sym.is_recordable] + + # parameter symbols with initial values + namespace["parameter_syms_with_iv"] = [sym for sym in neuron.get_parameter_symbols( + ) if sym.has_declaring_expression() and (not neuron.get_kernel_by_name(sym.name))] + namespace["cm_unique_suffix"] = self.getUniqueSuffix(neuron) + + # get the mechanisms info dictionaries and enrich them. + namespace["chan_info"] = ChannelProcessing.get_mechs_info(neuron) + namespace["chan_info"] = ChanInfoEnricher.enrich_with_additional_info(neuron, namespace["chan_info"]) + + namespace["syns_info"] = SynapseProcessing.get_mechs_info(neuron) + namespace["syns_info"] = SynsInfoEnricher.enrich_with_additional_info(neuron, namespace["syns_info"]) + + namespace["conc_info"] = ConcentrationProcessing.get_mechs_info(neuron) + namespace["conc_info"] = ConcInfoEnricher.enrich_with_additional_info(neuron, namespace["conc_info"]) + + chan_info_string = MechanismProcessing.print_dictionary(namespace["chan_info"], 0) + syns_info_string = MechanismProcessing.print_dictionary(namespace["syns_info"], 0) + conc_info_string = MechanismProcessing.print_dictionary(namespace["conc_info"], 0) + code, message = Messages.get_mechs_dictionary_info(chan_info_string, syns_info_string, conc_info_string) + Logger.log_message(None, code, message, None, LoggingLevel.DEBUG) + + neuron_specific_filenames = { + "compartmentcurrents": self.get_cm_syns_compartmentcurrents_file_prefix(neuron), + "main": self.get_cm_syns_main_file_prefix(neuron), + "tree": self.get_cm_syns_tree_file_prefix(neuron)} + + namespace["neuronSpecificFileNamesCmSyns"] = neuron_specific_filenames + + # there are no shared files anymore + namespace["sharedFileNamesCmSyns"] = { + } + + namespace["types_printer"] = self._type_symbol_printer + + return namespace + + def update_symbol_table(self, neuron, kernel_buffers): + """ + Update symbol table and scope.
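+ The neuron's existing scope is deleted and the symbol table visitor is re-run, so that symbols introduced by the preceding AST transformations (e.g. kernel buffer variables, propagators and the timestep symbol __h) are registered.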
+ """ + SymbolTable.delete_neuron_scope(neuron.get_name()) + symbol_table_visitor = ASTSymbolTableVisitor() + symbol_table_visitor.after_ast_rewrite_ = True + neuron.accept(symbol_table_visitor) + SymbolTable.add_neuron_scope(neuron.get_name(), neuron.get_scope()) + + def _get_ast_variable(self, neuron, var_name) -> Optional[ASTVariable]: + """ + Grab the ASTVariable corresponding to the initial value by this name + """ + for decl in neuron.get_state_blocks()[0].get_declarations(): + for var in decl.variables: + if var.get_name() == var_name: + return var + return None + + def create_initial_values_for_ode_toolbox_odes( + self, neuron, solver_dicts, kernel_buffers, kernels): + """ + Add the variables used in ODEs from the ode-toolbox result dictionary as ODEs in NESTML AST. + """ + for solver_dict in solver_dicts: + if solver_dict is None: + continue + for var_name in solver_dict["initial_values"].keys(): + # original initial value expressions should have been removed + # to make place for ode-toolbox results + assert not ASTUtils.declaration_in_state_block( + neuron, var_name) + + for solver_dict in solver_dicts: + if solver_dict is None: + continue + + for var_name, expr in solver_dict["initial_values"].items(): + # here, overwrite is allowed because initial values might be + # repeated between numeric and analytic solver + + if ASTUtils.variable_in_kernels(var_name, kernels): + expr = "0" # for kernels, "initial value" returned by ode-toolbox is actually the increment value; the actual initial value is assumed to be 0 + + if not ASTUtils.declaration_in_state_block(neuron, var_name): + ASTUtils.add_declaration_to_state_block( + neuron, var_name, expr) + + def get_spike_update_expressions( + self, + neuron: ASTNeuron, + kernel_buffers, + solver_dicts, + delta_factors) -> List[ASTAssignment]: + """ + Generate the equations that update the dynamical variables when incoming spikes arrive. To be invoked after ode-toolbox. + + For example, a resulting `assignment_str` could be "I_kernel_in += (in_spikes/nS) * 1". The values are taken from the initial values for each corresponding dynamical variable, either from ode-toolbox or directly from user specification in the model. + + Note that for kernels, `initial_values` actually contains the increment upon spike arrival, rather than the initial value of the corresponding ODE dimension. + + XXX: TODO: update this function signature (+ templates) to match NESTCodegenerator::get_spike_update_expressions(). 
+ + + """ + spike_updates = [] + + for kernel, spike_input_port in kernel_buffers: + if neuron.get_scope().resolve_to_symbol( + str(spike_input_port), SymbolKind.VARIABLE) is None: + continue + + buffer_type = neuron.get_scope().resolve_to_symbol( + str(spike_input_port), SymbolKind.VARIABLE).get_type_symbol() + + if ASTUtils.is_delta_kernel(kernel): + continue + + for kernel_var in kernel.get_variables(): + for var_order in range( + ASTUtils.get_kernel_var_order_from_ode_toolbox_result( + kernel_var.get_name(), solver_dicts)): + kernel_spike_buf_name = ASTUtils.construct_kernel_X_spike_buf_name( + kernel_var.get_name(), spike_input_port, var_order) + expr = ASTUtils.get_initial_value_from_ode_toolbox_result( + kernel_spike_buf_name, solver_dicts) + assert expr is not None, "Initial value not found for kernel " + kernel_var + expr = str(expr) + if expr in ["0", "0.", "0.0"]: + continue # skip adding the statement if we're only adding zero + + assignment_str = kernel_spike_buf_name + " += " + assignment_str += "(" + str(spike_input_port) + ")" + if expr not in ["1.", "1.0", "1"]: + assignment_str += " * (" + expr + ")" + + if not buffer_type.print_nestml_type() in ["1.", "1.0", "1"]: + assignment_str += " / (" + buffer_type.print_nestml_type() + ")" + + ast_assignment = ModelParser.parse_assignment( + assignment_str) + ast_assignment.update_scope(neuron.get_scope()) + ast_assignment.accept(ASTSymbolTableVisitor()) + + spike_updates.append(ast_assignment) + + for k, factor in delta_factors.items(): + var = k[0] + inport = k[1] + assignment_str = var.get_name() + "'" * (var.get_differential_order() - 1) + " += " + if factor not in ["1.", "1.0", "1"]: + assignment_str += "(" + self._printer.print(ModelParser.parse_expression(factor)) + ") * " + assignment_str += str(inport) + ast_assignment = ModelParser.parse_assignment(assignment_str) + ast_assignment.update_scope(neuron.get_scope()) + ast_assignment.accept(ASTSymbolTableVisitor()) + + spike_updates.append(ast_assignment) + + return spike_updates + + def transform_ode_and_kernels_to_json( + self, + neuron: ASTNeuron, + parameters_block, + kernel_buffers): + """ + Converts AST node to a JSON representation suitable for passing to ode-toolbox. + + Each kernel has to be generated for each spike buffer convolve in which it occurs, e.g. if the NESTML model code contains the statements + + convolve(G, ex_spikes) + convolve(G, in_spikes) + + then `kernel_buffers` will contain the pairs `(G, ex_spikes)` and `(G, in_spikes)`, from which two ODEs will be generated, with dynamical state (variable) names `G__X__ex_spikes` and `G__X__in_spikes`. + + :param parameters_block: ASTBlockWithVariables + :return: Dict + """ + odetoolbox_indict = {} + odetoolbox_indict["dynamics"] = [] + equations_block = neuron.get_equations_blocks()[0] + + for equation in equations_block.get_ode_equations(): + # n.b. 
includes single quotation marks to indicate differential + # order + lhs = ASTUtils.to_ode_toolbox_name( + equation.get_lhs().get_complete_name()) + rhs = self._ode_toolbox_printer.print(equation.get_rhs()) + entry = {"expression": lhs + " = " + rhs} + symbol_name = equation.get_lhs().get_name() + symbol = equations_block.get_scope().resolve_to_symbol( + symbol_name, SymbolKind.VARIABLE) + + entry["initial_values"] = {} + symbol_order = equation.get_lhs().get_differential_order() + for order in range(symbol_order): + iv_symbol_name = symbol_name + "'" * order + initial_value_expr = neuron.get_initial_value(iv_symbol_name) + if initial_value_expr: + expr = self._ode_toolbox_printer.print(initial_value_expr) + entry["initial_values"][ASTUtils.to_ode_toolbox_name( + iv_symbol_name)] = expr + odetoolbox_indict["dynamics"].append(entry) + + # write a copy for each (kernel, spike buffer) combination + for kernel, spike_input_port in kernel_buffers: + + if ASTUtils.is_delta_kernel(kernel): + # delta function -- skip passing this to ode-toolbox + continue + + for kernel_var in kernel.get_variables(): + expr = ASTUtils.get_expr_from_kernel_var( + kernel, kernel_var.get_complete_name()) + kernel_order = kernel_var.get_differential_order() + kernel_X_spike_buf_name_ticks = ASTUtils.construct_kernel_X_spike_buf_name( + kernel_var.get_name(), spike_input_port, kernel_order, diff_order_symbol="'") + + ASTUtils.replace_rhs_variables(expr, kernel_buffers) + + entry = {} + entry["expression"] = kernel_X_spike_buf_name_ticks + " = " + str(expr) + + # initial values need to be declared for order 1 up to kernel + # order (e.g. none for kernel function f(t) = ...; 1 for kernel + # ODE f'(t) = ...; 2 for f''(t) = ... and so on) + entry["initial_values"] = {} + for order in range(kernel_order): + iv_sym_name_ode_toolbox = ASTUtils.construct_kernel_X_spike_buf_name( + kernel_var.get_name(), spike_input_port, order, diff_order_symbol="'") + symbol_name_ = kernel_var.get_name() + "'" * order + symbol = equations_block.get_scope().resolve_to_symbol( + symbol_name_, SymbolKind.VARIABLE) + assert symbol is not None, "Could not find initial value for variable " + symbol_name_ + initial_value_expr = symbol.get_declaring_expression() + assert initial_value_expr is not None, "No initial value found for variable name " + symbol_name_ + entry["initial_values"][iv_sym_name_ode_toolbox] = self._ode_toolbox_printer.print( + initial_value_expr) + + odetoolbox_indict["dynamics"].append(entry) + + odetoolbox_indict["parameters"] = {} + if parameters_block is not None: + for decl in parameters_block.get_declarations(): + for var in decl.variables: + odetoolbox_indict["parameters"][var.get_complete_name( + )] = self._ode_toolbox_printer.print(decl.get_expression()) + + return odetoolbox_indict diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py index 4d2f40843..696583319 100644 --- a/pynestml/codegeneration/printers/nest_variable_printer.py +++ b/pynestml/codegeneration/printers/nest_variable_printer.py @@ -43,10 +43,11 @@ class NESTVariablePrinter(CppVariablePrinter): Variable printer for C++ syntax and the NEST API. 
""" - def __init__(self, expression_printer: ExpressionPrinter, with_origin: bool = True, with_vector_parameter: bool = True) -> None: + def __init__(self, expression_printer: ExpressionPrinter, with_origin: bool = True, with_vector_parameter: bool = True, enforce_getter: bool = True) -> None: super().__init__(expression_printer) self.with_origin = with_origin self.with_vector_parameter = with_vector_parameter + self.enforce_getter = enforce_getter def print_variable(self, variable: ASTVariable) -> str: """ @@ -101,7 +102,11 @@ def print_variable(self, variable: ASTVariable) -> str: if symbol.is_inline_expression: # there might not be a corresponding defined state variable; insist on calling the getter function - return "get_" + self._print(variable, symbol, with_origin=False) + vector_param + "()" + if self.enforce_getter: + return "get_" + self._print(variable, symbol, with_origin=False) + vector_param + "()" + # modification to not enforce getter function: + else: + return self._print(variable, symbol, with_origin=False) assert not symbol.is_kernel(), "Cannot print kernel; kernel should have been converted during code generation" diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/CommonPropertiesDictionaryReader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/CommonPropertiesDictionaryReader.jinja2 new file mode 100644 index 000000000..130ae77c2 --- /dev/null +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/CommonPropertiesDictionaryReader.jinja2 @@ -0,0 +1,9 @@ +{# + In general case creates an + @param variable VariableSymbol Variable for which the initialization should be done +#} +{%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} +{%- if variable.has_vector_parameter() %} +{{ raise('Vector parameters not supported in common properties dictionary.') }} +{%- endif %} +updateValue< {{declarations.print_variable_type(variable)}} >(d, names::{{namespaceName}}, this->{{ printer.print(utils.get_state_variable_by_name(astnode, variable)) }} ); diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.cpp.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.cpp.jinja2 new file mode 100644 index 000000000..f7bdf3eee --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.cpp.jinja2 @@ -0,0 +1,357 @@ +/* + * cm_default.cpp + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * + */ +#include "{{neuronSpecificFileNamesCmSyns["main"]}}.h" + + +namespace nest +{ + +/* + * For some reason this code block is needed. 
However, I have found no + * difference in calling init_recordable_pointers() from the pre_run_hook() or calibrate() function, + * except that an unused-variable warning is generated in the code-checks + */ +template <> +void +DynamicRecordablesMap< {{neuronSpecificFileNamesCmSyns["main"]}} >::create( {{neuronSpecificFileNamesCmSyns["main"]}}& host ) +{ + host.init_recordables_pointers_(); +} + +/* ---------------------------------------------------------------- + * Default and copy constructor for node + * ---------------------------------------------------------------- */ + +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::{{neuronSpecificFileNamesCmSyns["main"]}}() + : ArchivingNode() + , c_tree_() + , syn_buffers_( 0 ) + , logger_( *this ) + , V_th_( -55.0 ) +{ + recordablesMap_.create( *this ); + recordables_values.resize( 0 ); +} + +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::{{neuronSpecificFileNamesCmSyns["main"]}}( const {{neuronSpecificFileNamesCmSyns["main"]}}& n ) + : ArchivingNode( n ) + , c_tree_( n.c_tree_ ) + , syn_buffers_( n.syn_buffers_ ) + , logger_( *this ) + , V_th_( n.V_th_ ) +{ + recordables_values.resize( 0 ); +} + +/* ---------------------------------------------------------------- + * Node initialization functions + * ---------------------------------------------------------------- + */ +void +{{neuronSpecificFileNamesCmSyns["main"]}}::get_status( DictionaryDatum& statusdict ) const +{ + def< double >( statusdict, names::V_th, V_th_ ); + ArchivingNode::get_status( statusdict ); + + // add all recordables to the status dictionary + ( *statusdict )[ names::recordables ] = recordablesMap_.get_list(); + + // We add a list of dicts with compartment information and + // a list of dicts with receptor information to the status dictionary + ArrayDatum compartment_ad; + ArrayDatum receptor_ad; + for ( long comp_idx_ = 0; comp_idx_ != c_tree_.get_size(); comp_idx_++ ) + { + DictionaryDatum dd = DictionaryDatum( new Dictionary ); + Compartment{{cm_unique_suffix}}* compartment = c_tree_.get_compartment( comp_idx_ ); + + // add compartment info + def< long >( dd, names::comp_idx, comp_idx_ ); + def< long >( dd, names::parent_idx, compartment->p_index ); + compartment_ad.push_back( dd ); + + // add receptor info + compartment->compartment_currents.add_receptor_info( receptor_ad, compartment->comp_index ); + } + // add compartment info and receptor info to the status dictionary + def< ArrayDatum >( statusdict, names::compartments, compartment_ad ); + def< ArrayDatum >( statusdict, names::receptors, receptor_ad ); +} + +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::set_status( const DictionaryDatum& statusdict ) +{ + updateValue< double >( statusdict, names::V_th, V_th_ ); + ArchivingNode::set_status( statusdict ); + + /** + * Add a compartment (or compartments) to the tree, so that the new compartment + * has the compartment specified by "parent_idx" as parent. The parent + * has to be in the tree, otherwise an error will be raised. 
We add either a + * single compartment or multiple compartments, depending on whether the + * entry was a list of dicts or a single dict + */ + if ( statusdict->known( names::compartments ) ) + { + /** + * Until an operator to explicitly append compartments is added to the + * API, we disable this functionality + */ + if ( c_tree_.get_size() > 0 ) + { + throw BadProperty( "\'compartments\' is already defined for this model" ); + } + + Datum* dat = ( *statusdict )[ names::compartments ].datum(); + ArrayDatum* ad = dynamic_cast< ArrayDatum* >( dat ); + DictionaryDatum* dd = dynamic_cast< DictionaryDatum* >( dat ); + + if ( ad != nullptr ) + { + // A list of compartments is provided, we add them all to the tree + for ( Token* tt = ( *ad ).begin(); tt != ( *ad ).end(); ++tt ) + { + // cast the Datum pointer stored within token dynamically to a + // DictionaryDatum pointer + add_compartment_( *dynamic_cast< DictionaryDatum* >( tt->datum() ) ); + } + } + else if ( dd != nullptr ) + { + // A single compartment is provided, we add it to the tree + add_compartment_( *dd ); + } + else + { + throw BadProperty( + "\'compartments\' entry could not be identified, provide " + "list of parameter dicts for multiple compartments" ); + } + } + + /** + * Add a receptor (or receptors) to the tree, so that the new receptor + * targets the compartment specified by "comp_idx". The compartment + * has to be in the tree, otherwise an error will be raised. We add either a + * single receptor or multiple receptors, depending on whether the + * entry was a list of dicts or a single dict + */ + if ( statusdict->known( names::receptors ) ) + { + /** + * Until an operator to explicitly append receptors is added to the + * API, we disable this functionality + */ + if ( long( syn_buffers_.size() ) > 0 ) + { + throw BadProperty( "\'receptors\' is already defined for this model" ); + } + + Datum* dat = ( *statusdict )[ names::receptors ].datum(); + ArrayDatum* ad = dynamic_cast< ArrayDatum* >( dat ); + DictionaryDatum* dd = dynamic_cast< DictionaryDatum* >( dat ); + + if ( ad != nullptr ) + { + for ( Token* tt = ( *ad ).begin(); tt != ( *ad ).end(); ++tt ) + { + // cast the Datum pointer stored within token dynamically to a + // DictionaryDatum pointer + add_receptor_( *dynamic_cast< DictionaryDatum* >( tt->datum() ) ); + } + } + else if ( dd != nullptr ) + { + add_receptor_( *dd ); + } + else + { + throw BadProperty( + "\'receptors\' entry could not be identified, provide " + "list of parameter dicts for multiple receptors" ); + } + } + /** + * we need to initialize the recordables pointers to guarantee that the + * recordables of the new compartments and/or receptors will be in the + * recordables map + */ + init_recordables_pointers_(); +} +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::add_compartment_( DictionaryDatum& dd ) +{ + if ( dd->known( names::params ) ) + { + c_tree_.add_compartment( + getValue< long >( dd, names::parent_idx ), getValue< DictionaryDatum >( dd, names::params ) ); + } + else + { + c_tree_.add_compartment( getValue< long >( dd, names::parent_idx ) ); + } +} +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::add_receptor_( DictionaryDatum& dd ) +{ + const long compartment_idx = getValue< long >( dd, names::comp_idx ); + const std::string receptor_type = getValue< std::string >( dd, names::receptor_type ); + + // create a ringbuffer to collect spikes for the receptor + RingBuffer buffer; + + // add the ringbuffer to the global receptor vector + const size_t syn_idx =
syn_buffers_.size(); + syn_buffers_.push_back( buffer ); + + // add the receptor to the compartment + Compartment{{cm_unique_suffix}}* compartment = c_tree_.get_compartment( compartment_idx ); + if ( dd->known( names::params ) ) + { + compartment->compartment_currents.add_synapse( + receptor_type, syn_idx, getValue< DictionaryDatum >( dd, names::params ) ); + } + else + { + compartment->compartment_currents.add_synapse( receptor_type, syn_idx ); + } +} + +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::init_recordables_pointers_() +{ + /** + * Get the map of all recordables (i.e. all state variables of the model): + * --> keys are state variable names suffixed by the compartment index for + * voltage (e.g. "v_comp1") or by the synapse index for receptor currents + * --> values are pointers to the specific state variables + */ + std::map< Name, double* > recordables = c_tree_.get_recordables(); + + for ( auto rec_it = recordables.begin(); rec_it != recordables.end(); rec_it++ ) + { + // check if name is already in recordables map + auto recname_it = find( recordables_names.begin(), recordables_names.end(), rec_it->first ); + if ( recname_it == recordables_names.end() ) + { + // recordable name is not yet in map, we need to add it + recordables_names.push_back( rec_it->first ); + recordables_values.push_back( rec_it->second ); + const long rec_idx = recordables_values.size() - 1; + // add the recordable to the recordable_name -> recordable_index map + recordablesMap_.insert( rec_it->first, DataAccessFunctor< {{neuronSpecificFileNamesCmSyns["main"]}} >( *this, rec_idx ) ); + } + else + { + // recordable name is in map, we update the pointer to the recordable + long index = recname_it - recordables_names.begin(); + recordables_values[ index ] = rec_it->second; + } + } +} + +void +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::calibrate() +{%- else %} +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::pre_run_hook() +{%- endif %} +{ + logger_.init(); + + // initialize the pointers within the compartment tree + c_tree_.init_pointers(); + // initialize the pointers to the synapse buffers for the receptor currents + c_tree_.set_syn_buffers( syn_buffers_ ); + // initialize the recordables pointers + init_recordables_pointers_(); + +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + c_tree_.calibrate(); +{%- else %} + c_tree_.pre_run_hook(); +{%- endif %} +} + +/** + * Update and spike handling functions + */ +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::update( Time const& origin, const long from, const long to ) +{ + assert( to >= 0 && from < kernel().connection_manager.get_min_delay() ); + assert( from < to ); + + for ( long lag = from; lag < to; ++lag ) + { + const double v_0_prev = c_tree_.get_root()->v_comp; + + c_tree_.construct_matrix( lag ); + c_tree_.solve_matrix(); + + // threshold crossing + if ( c_tree_.get_root()->v_comp >= V_th_ && v_0_prev < V_th_ ) + { + set_spiketime( Time::step( origin.get_steps() + lag + 1 ) ); + + SpikeEvent se; + kernel().event_delivery_manager.send( *this, se, lag ); + } + + logger_.record_data( origin.get_steps() + lag ); + } +} + +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::handle( SpikeEvent& e ) +{ + if ( e.get_weight() < 0 ) + { + throw BadProperty( "Synaptic weights must be positive." 
); + } + + assert( e.get_delay_steps() > 0 ); + assert( ( e.get_rport() >= 0 ) && ( ( size_t ) e.get_rport() < syn_buffers_.size() ) ); + + syn_buffers_[ e.get_rport() ].add_value( + e.get_rel_delivery_steps( kernel().simulation_manager.get_slice_origin() ), e.get_weight() * e.get_multiplicity() ); +} + +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::handle( CurrentEvent& e ) +{ + assert( e.get_delay_steps() > 0 ); + + const double c = e.get_current(); + const double w = e.get_weight(); + + Compartment{{cm_unique_suffix}}* compartment = c_tree_.get_compartment_opt( e.get_rport() ); + compartment->currents.add_value( e.get_rel_delivery_steps( kernel().simulation_manager.get_slice_origin() ), w * c ); +} + +void +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::handle( DataLoggingRequest& e ) +{ + logger_.handle( e ); +} + +} // namespace diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.h.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.h.jinja2 new file mode 100644 index 000000000..e342d5f6a --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/@NEURON_NAME@.h.jinja2 @@ -0,0 +1,342 @@ +/* + * {{neuronSpecificFileNamesCmSyns["main"]}}.h + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * + */ + +#ifndef CM_DEFAULT_H +#define CM_DEFAULT_H + +// Includes from nestkernel: +#include "archiving_node.h" +#include "event.h" +#include "nest_types.h" +#include "universal_data_logger.h" + +#include "{{neuronSpecificFileNamesCmSyns["compartmentcurrents"]}}.h" +#include "{{neuronSpecificFileNamesCmSyns["tree"]}}.h" + +namespace nest +{ + +/* BeginUserDocs: neuron, compartmental model + +Short description ++++++++++++++++++ + +A neuron model with user-defined dendrite structure. + +Description ++++++++++++ + +``cm_default`` is an implementation of a compartmental model. The structure of the +neuron -- soma, dendrites, axon -- is user-defined at runtime by adding +compartments through ``nest.SetStatus()``. Each compartment can be assigned +receptors, also through ``nest.SetStatus()``. + +The default model is passive, but sodium and potassium currents can be added +by passing non-zero conductances ``g_Na`` and ``g_K`` with the parameter dictionary +when adding compartments. Receptors can be AMPA and/or NMDA (excitatory), and +GABA (inhibitory). Ion channel and receptor currents to the compartments can be +customized through NESTML + +Usage ++++++ + +The structure of the dendrite is user defined. Thus after creation of the neuron +in the standard manner: + +.. code-block:: Python + + cm = nest.Create('cm_default') + +compartments can be added as follows: + +.. 
code-block:: Python + + cm.compartments = [ + {"parent_idx": -1, "params": {"e_L": -65.}}, + {"parent_idx": 0, "params": {"e_L": -60., "g_C": 0.02}} + ] + + Each compartment is assigned an index, corresponding to the order in which they + were added. Subsequently, compartment indices are used to specify parent + compartments in the tree or are used to assign receptors to the compartments. + By convention, the first compartment is the root (soma), which has no parent. + In this case, ``parent_idx`` is -1. + + Synaptic receptors can be added as follows: + + .. code-block:: Python + + cm.receptors = [{ + "comp_idx": 1, + "receptor_type": "AMPA", + "params": {"e_AMPA": 0., "tau_AMPA": 3.} + }] + + Similar to compartments, each receptor is assigned an index, starting at 0 and + corresponding to the order in which they are added. This index is used + subsequently to connect synapses to the receptor: + + .. code-block:: Python + + nest.Connect(pre, cm_model, syn_spec={ + 'synapse_model': 'static_synapse', 'weight': 5., 'delay': 0.5, + 'receptor_type': 2}) + + .. note:: + + In the ``nest.SetStatus()`` call, the ``receptor_type`` entry is a string + that specifies the type of receptor. In the ``nest.Connect()`` call, the + ``receptor_type`` entry is an integer that specifies the receptor index. + + .. note:: + + Each compartment's respective "receptors" entries can be a dictionary or a list + of dictionaries containing receptor details. When a dictionary is provided, + a single compartment receptor is added to the model. When a list of dicts + is provided, multiple compartments' receptors are added with a single + ``nest.SetStatus()`` call. + + Compartment voltages can be recorded. To do so, create a multimeter in the + standard manner but specify the recorded voltages as + ``v_comp{compartment_index}``. State variables for ion channels can be recorded as well, + using the syntax ``{state_variable_name}{compartment_index}``. For receptor state + variables, use the receptor index ``{state_variable_name}{receptor_index}``: + + .. code-block:: Python + + mm = nest.Create('multimeter', 1, {'record_from': ['v_comp0', ...]}) + + Current generators can be connected to the model. In this case, the receptor + type is the compartment index: + + .. code-block:: Python + + dc = nest.Create('dc_generator', {...}) + nest.Connect(dc, cm, syn_spec={..., 'receptor_type': 0}) + +Parameters +++++++++++ + +The following parameters can be set in the status dictionary. + +=========== ======= =========================================================== + V_th mV Spike threshold (default: -55.0 mV) +=========== ======= =========================================================== + +The following parameters can be used when adding compartments using ``SetStatus()`` + +=========== ======= =============================================================== + C_m uF Capacitance of compartment (default: 1 uF) + g_C uS Coupling conductance with parent compartment (default: 0.01 uS) + g_L uS Leak conductance of the compartment (default: 0.1 uS) + e_L mV Leak reversal of the compartment (default: -70. mV) +=========== ======= =============================================================== + +Ion channels and receptor types for the default model are hardcoded. +For ion channels, there is a Na-channel and a K-channel.
Parameters can be set + by specifying the following entries in the ``SetStatus`` dictionary argument: + +=========== ======= =========================================================== + gbar_Na uS Maximal conductance Na channel (default: 0 uS) + e_Na mV Reversal Na channel (default: 50 mV) + gbar_K uS Maximal conductance K channel (default: 0 uS) + e_K mV Reversal K channel (default: -85 mV) +=========== ======= =========================================================== + +For receptors, the choice is ``AMPA``, ``GABA``, ``NMDA`` or ``AMPA_NMDA``. +Ion channels and receptor types can be customized with :doc:`NESTML `. + +If ``receptor_type`` is AMPA + +=========== ======= =========================================================== + e_AMPA mV AMPA reversal (default 0 mV) + tau_r_AMPA ms AMPA rise time (default .2 ms) + tau_d_AMPA ms AMPA decay time (default 3. ms) +=========== ======= =========================================================== + +If ``receptor_type`` is GABA + +=========== ======= =========================================================== + e_GABA mV GABA reversal (default -80 mV) + tau_r_GABA ms GABA rise time (default .2 ms) + tau_d_GABA ms GABA decay time (default 10. ms) +=========== ======= =========================================================== + +If ``receptor_type`` is NMDA + +=========== ======= =========================================================== + e_NMDA mV NMDA reversal (default 0 mV) + tau_r_NMDA ms NMDA rise time (default .2 ms) + tau_d_NMDA ms NMDA decay time (default 43. ms) +=========== ======= =========================================================== + +If ``receptor_type`` is AMPA_NMDA + +============ ======= =========================================================== + e_AMPA_NMDA mV NMDA reversal (default 0 mV) + tau_r_AMPA ms AMPA rise time (default .2 ms) + tau_d_AMPA ms AMPA decay time (default 3. ms) + tau_r_NMDA ms NMDA rise time (default .2 ms) + tau_d_NMDA ms NMDA decay time (default 43.
ms) + NMDA_ratio (1) Ratio of NMDA versus AMPA channels +============ ======= =========================================================== + +Sends ++++++ + +SpikeEvent + +Receives +++++++++ + +SpikeEvent, CurrentEvent, DataLoggingRequest + +References +++++++++++ + +Data-driven reduction of dendritic morphologies with preserved dendro-somatic responses +WAM Wybo, J Jordan, B Ellenberger, UM Mengual, T Nevian, W Senn +Elife 10, `e60936 `_ + +See also +++++++++ + +NEURON simulator ;-D + +EndUserDocs*/ + +class {{neuronSpecificFileNamesCmSyns["main"]}} : public ArchivingNode +{ + +public: + {{neuronSpecificFileNamesCmSyns["main"]}}(); + {{neuronSpecificFileNamesCmSyns["main"]}}( const {{neuronSpecificFileNamesCmSyns["main"]}}& ); + + using Node::handle; + using Node::handles_test_event; + + size_t send_test_event( Node&, size_t, synindex, bool ); + + void handle( SpikeEvent& ); + void handle( CurrentEvent& ); + void handle( DataLoggingRequest& ); + + size_t handles_test_event( SpikeEvent&, size_t ); + size_t handles_test_event( CurrentEvent&, size_t ); + size_t handles_test_event( DataLoggingRequest&, size_t ); + + void get_status( DictionaryDatum& ) const; + void set_status( const DictionaryDatum& ); + +private: + void add_compartment_( DictionaryDatum& dd ); + void add_receptor_( DictionaryDatum& dd ); + + void init_recordables_pointers_(); +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + void calibrate(); +{%- else %} + void pre_run_hook(); +{%- endif %} + + void update( Time const&, const long, const long ); + + CompTree{{cm_unique_suffix}} c_tree_; + std::vector< RingBuffer > syn_buffers_; + + // To record variables with DataAccessFunctor + double + get_state_element( size_t elem ) + { + return *recordables_values[ elem ]; + }; + + // The next classes need to be friends to access the State_ class/member + friend class DataAccessFunctor< {{neuronSpecificFileNamesCmSyns["main"]}} >; + friend class DynamicRecordablesMap< {{neuronSpecificFileNamesCmSyns["main"]}} >; + friend class DynamicUniversalDataLogger< {{neuronSpecificFileNamesCmSyns["main"]}} >; + + /* + internal ordering of all recordables in a vector + the vector 'recordables_values' stores pointers to all state variables + present in the model + */ + std::vector< Name > recordables_names; + std::vector< double* > recordables_values; + + //! Mapping of recordables names to access functions + DynamicRecordablesMap< {{neuronSpecificFileNamesCmSyns["main"]}} > recordablesMap_; + //! 
Logger for all analog data + DynamicUniversalDataLogger< {{neuronSpecificFileNamesCmSyns["main"]}} > logger_; + + double V_th_; +}; + + +inline size_t +nest::{{neuronSpecificFileNamesCmSyns["main"]}}::send_test_event( Node& target, size_t receptor_type, synindex, bool ) +{ + SpikeEvent e; + e.set_sender( *this ); + return target.handles_test_event( e, receptor_type ); +} + +inline size_t +{{neuronSpecificFileNamesCmSyns["main"]}}::handles_test_event( SpikeEvent&, size_t receptor_type ) +{ + if ( ( receptor_type < 0 ) or ( receptor_type >= static_cast< size_t >( syn_buffers_.size() ) ) ) + { + std::ostringstream msg; + msg << "Valid spike receptor ports for " << get_name() << " are in "; + msg << "[" << 0 << ", " << syn_buffers_.size() << "["; + throw UnknownPort( receptor_type, msg.str() ); + } + return receptor_type; +} + +inline size_t +{{neuronSpecificFileNamesCmSyns["main"]}}::handles_test_event( CurrentEvent&, size_t receptor_type ) +{ + // if get_compartment returns nullptr, raise the error + if ( not c_tree_.get_compartment( long( receptor_type ), c_tree_.get_root(), 0 ) ) + { + std::ostringstream msg; + msg << "Valid current receptor ports for " << get_name() << " are in "; + msg << "[" << 0 << ", " << c_tree_.get_size() << "["; + throw UnknownPort( receptor_type, msg.str() ); + } + return receptor_type; +} + +inline size_t +{{neuronSpecificFileNamesCmSyns["main"]}}::handles_test_event( DataLoggingRequest& dlr, size_t receptor_type ) +{ + if ( receptor_type != 0 ) + { + throw UnknownReceptorType( receptor_type, get_name() ); + } + return logger_.connect_logging_device( dlr, recordablesMap_ ); +} + +} // namespace + +#endif /* #ifndef CM_{cm_unique_suffix | upper }}_H */ diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/__init__.py b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/__init__.py new file mode 100644 index 000000000..ec6cd5167 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# __init__.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +# --------------------------------------------------------------- +# Caution: This file is required to enable Python to also include the templates +# --------------------------------------------------------------- diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.cpp.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.cpp.jinja2 new file mode 100644 index 000000000..98626fc96 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.cpp.jinja2 @@ -0,0 +1,417 @@ +{#- +cm_compartmentcurrents_@NEURON_NAME@.cpp.jinja2 + +This file is part of NEST. 
+ +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +#} +{%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} +{%- import 'directives_cpp/FunctionDeclaration.jinja2' as function_declaration with context %} +#include "{{neuronSpecificFileNamesCmSyns["compartmentcurrents"]}}.h" + +{%- set current_conductance_name_prefix = "g" %} +{%- set current_equilibrium_name_prefix = "e" %} +{% macro render_dynamic_channel_variable_name(variable_type, ion_channel_name) %} + {%- if variable_type == "gbar" %} + {{ current_conductance_name_prefix~"_"~ion_channel_name }} + {%- elif variable_type == "e" %} + {{ current_equilibrium_name_prefix~"_"~ion_channel_name }} + {%- endif %} +{%- endmacro %} + +{%- macro render_state_variable_name(pure_variable_name, ion_channel_name) %} + {{ pure_variable_name~"_"~ion_channel_name }} +{%- endmacro %} + +{% macro render_time_resolution_variable(synapse_info) %} +{# we assume here that there is only one such variable ! #} +{%- for analytic_helper_name, analytic_helper_info in synapse_info["analytic_helpers"].items() %} +{%- if analytic_helper_info["is_time_resolution"] %} + {{ analytic_helper_name }} +{%- endif %} +{%- endfor %} +{%- endmacro %} + +{% macro render_function_return_type(function) %} +{%- with %} + {%- set symbol = function.get_scope().resolve_to_symbol(function.get_name(), SymbolKind.FUNCTION) %} + {{ types_printer.print(symbol.get_return_type()) }} +{%- endwith %} +{%- endmacro %} + +{% macro render_inline_expression_type(inline_expression) %} +{%- with %} + {%- set symbol = inline_expression.get_scope().resolve_to_symbol(inline_expression.variable_name, SymbolKind.VARIABLE) %} + {{ types_printer.print(symbol.get_type_symbol()) }} +{%- endwith %} +{%- endmacro %} + +{% macro render_static_channel_variable_name(variable_type, ion_channel_name) %} + +{%- for ion_channel_nm, channel_info in chan_info.items() %} + {%- if ion_channel_nm == ion_channel_name %} + {%- for variable_tp, variable_info in channel_info["channel_parameters"].items() %} + {%- if variable_tp == variable_type %} + {%- set variable = variable_info["parameter_block_variable"] %} + {{ variable.name }} + {%- endif %} + {%- endfor %} + {%- endif %} +{%- endfor %} + +{%- endmacro %} + +{% macro render_channel_function(function, ion_channel_name) %} +{{ function_declaration.FunctionDeclaration(function, "nest::"~ion_channel_name~cm_unique_suffix~"::") }} +{ +{%- filter indent(2,True) %} +{%- with ast = function.get_block() %} +{%- include "directives_cpp/Block.jinja2" %} +{%- endwith %} +{%- endfilter %} +} +{%- endmacro %} + + +{%- for ion_channel_name, channel_info in chan_info.items() %} + +// {{ion_channel_name}} channel ////////////////////////////////////////////////////////////////// +nest::{{ion_channel_name}}{{cm_unique_suffix}}::{{ion_channel_name}}{{cm_unique_suffix}}() + +{%- for pure_variable_name, variable_info in channel_info["States"].items() %} +// state variable {{pure_variable_name -}} +{%- 
set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +{% if loop.first %}: {% else %}, {% endif %} +{{- variable.name}}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} + +{% for variable_type, variable_info in channel_info["Parameters"].items() %} +// channel parameter {{variable_type -}} +{%- set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +,{{- variable.name }}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} +{} + +nest::{{ion_channel_name}}{{cm_unique_suffix}}::{{ion_channel_name}}{{cm_unique_suffix}}(const DictionaryDatum& channel_params) + +{%- for pure_variable_name, variable_info in channel_info["States"].items() %} +// state variable {{pure_variable_name -}} +{%- set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +{% if loop.first %}: {% else %}, {% endif %} +{{- variable.name}}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} + +{% for variable_type, variable_info in channel_info["Parameters"].items() %} +// channel parameter {{variable_type -}} +{%- set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +,{{- variable.name }}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} +// update {{ion_channel_name}} channel parameters +{ + {%- for variable_type, variable_info in channel_info["Parameters"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set dynamic_variable = render_dynamic_channel_variable_name(variable_type, ion_channel_name) %} //have to remove??????????? + // {{ion_channel_name}} channel parameter {{dynamic_variable }} + if( channel_params->known( "{{variable.name}}" ) ) + {{variable.name}} = getValue< double >( channel_params, "{{variable.name}}" ); + {%- endfor %} +} + +void +nest::{{ion_channel_name}}{{cm_unique_suffix}}::append_recordables(std::map< Name, double* >* recordables, + const long compartment_idx) +{ + // add state variables to recordables map + {%- for pure_variable_name, variable_info in channel_info["States"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + ( *recordables )[ Name( "{{variable.name}}" + std::to_string(compartment_idx) )] = &{{variable.name}}; + {%- endfor %} + ( *recordables )[ Name( "i_tot_{{ion_channel_name}}" + std::to_string(compartment_idx) )] = &i_tot_{{ion_channel_name}}; +} + +std::pair< double, double > nest::{{ion_channel_name}}{{cm_unique_suffix}}::f_numstep(const double v_comp{% for ode in channel_info["Dependencies"]["concentrations"] %}, double {{ode.lhs.name}}{% endfor %} + {% for inline in channel_info["Dependencies"]["receptors"] %}, double {{inline.variable_name}}{% endfor %} + {% for inline in channel_info["Dependencies"]["channels"] %}, double {{inline.variable_name}}{% endfor %}) +{ + double g_val = 0., i_val = 0.; + + if({%- for key_zero_param in channel_info["RootInlineKeyZeros"] %} {{ key_zero_param }} > 1e-9 && {%- endfor %} true ){ + {% if channel_info["ODEs"].items()|length %} double {{ printer_no_origin.print(channel_info["time_resolution_var"]) }} = Time::get_resolution().get_ms(); {% endif %} + + {%- for ode_variable, ode_info in channel_info["ODEs"].items() %} + {%- for propagator, propagator_info in ode_info["transformed_solutions"][0]["propagators"].items() %} + double {{ propagator }} = {{ printer_no_origin.print(propagator_info["init_expression"]) }}; + {%- endfor %} + {%- for state, 
state_solution_info in ode_info["transformed_solutions"][0]["states"].items() %} + {{state}} = {{ printer_no_origin.print(state_solution_info["update_expression"]) }}; + {%- endfor %} + {%- endfor %} + + {%- set inline_expression = channel_info["root_expression"] %} + {%- set inline_expression_d = channel_info["inline_derivative"] %} + // compute the conductance of the {{ion_channel_name}} channel + this->i_tot_{{ion_channel_name}} = {{ printer_no_origin.print(inline_expression.get_expression()) }}; + // derivative + double d_i_tot_dv = {{ printer_no_origin.print(inline_expression_d) }}; + + g_val = - d_i_tot_dv / 2.; + i_val = this->i_tot_{{ion_channel_name}} - d_i_tot_dv * v_comp / 2.; + } + return std::make_pair(g_val, i_val); + +} + +{%- for function in channel_info["Functions"] %} +{{render_channel_function(function, ion_channel_name)}} +{%- endfor %} + +double nest::{{ion_channel_name}}{{cm_unique_suffix}}::get_current_{{ion_channel_name}}(){ + return this->i_tot_{{ion_channel_name}}; +} + +// {{ion_channel_name}} channel end /////////////////////////////////////////////////////////// +{% endfor %} +//////////////////////////////////////////////////////////////////////////////// + +{%- for synapse_name, synapse_info in syns_info.items() %} +// {{synapse_name}} synapse //////////////////////////////////////////////////////////////// +nest::{{synapse_name}}{{cm_unique_suffix}}::{{synapse_name}}{{cm_unique_suffix}}( const long syn_index ) + {%- for param_name, param_declaration in synapse_info["Parameters"].items() %} + {% if loop.first %}: {% else %}, {% endif %} + {{ param_name }}({{ printer_no_origin.print(param_declaration["rhs_expression"]) }}) + {%- endfor %} +{ + syn_idx = syn_index; +} + +nest::{{synapse_name}}{{cm_unique_suffix}}::{{synapse_name}}{{cm_unique_suffix}}( const long syn_index, const DictionaryDatum& receptor_params ) + {%- for param_name, param_declaration in synapse_info["Parameters"].items() %} + {% if loop.first %}: {% else %}, {% endif %} + {{ param_name }}({{ printer_no_origin.print(param_declaration["rhs_expression"]) }}) + {%- endfor %} +{ + syn_idx = syn_index; + + // update parameters + {%- for param_name, param_declaration in synapse_info["Parameters"].items() %} + if( receptor_params->known( "{{param_name}}" ) ) + {{param_name}} = getValue< double >( receptor_params, "{{param_name}}" ); + {%- endfor %} +} + +void +nest::{{synapse_name}}{{cm_unique_suffix}}::append_recordables(std::map< Name, double* >* recordables) +{ + {%- for convolution, convolution_info in synapse_info["convolutions"].items() %} + ( *recordables )[ Name( "{{convolution_info["kernel"]["name"]}}" + std::to_string(syn_idx) )] = &{{convolution}}; + {%- endfor %} + ( *recordables )[ Name( "i_tot_{{synapse_name}}" + std::to_string(syn_idx) )] = &i_tot_{{synapse_name}}; +} + +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} +void nest::{{synapse_name}}{{cm_unique_suffix}}::calibrate() +{%- else %} +void nest::{{synapse_name}}{{cm_unique_suffix}}::pre_run_hook() +{%- endif %} +{ + + const double {{render_time_resolution_variable(synapse_info)}} = Time::get_resolution().get_ms(); + + // set propagators to ode toolbox returned value + {%- for convolution, convolution_info in synapse_info["convolutions"].items() %} + {%- for state_variable_name, state_variable_info in convolution_info["analytic_solution"]["propagators"].items()%} + {{state_variable_name}} = {{ 
printer_no_origin.print(state_variable_info["init_expression"]) }}; + {%- endfor %} + {%- endfor %} + + // initial values for user defined states + // warning: this shadows class variables + {%- for state_name, state_declaration in synapse_info["States"].items() %} + double {{state_name}} = {{ printer_no_origin.print(state_declaration["rhs_expression"]) }}; + {%- endfor %} + + // initial values for kernel state variables, set to zero + {%- for convolution, convolution_info in synapse_info["convolutions"].items() %} + {%- for state_variable_name, state_variable_info in convolution_info["analytic_solution"]["kernel_states"].items()%} + {{state_variable_name}} = 0; + {%- endfor %} + {%- endfor %} + + // user declared internals in order they were declared + {%- for internal_name, internal_declaration in synapse_info["internals_used_declared"] %} + {{internal_name}} = {{ printer_no_origin.print(internal_declaration.get_expression()) }}; + {%- endfor %} + + {{synapse_info["buffer_name"]}}_->clear(); +} + +std::pair< double, double > nest::{{synapse_name}}{{cm_unique_suffix}}::f_numstep( const double v_comp, const long lag {% for ode in synapse_info["Dependencies"]["concentrations"] %}, double {{ode.lhs.name}}{% endfor %} + {% for inline in synapse_info["Dependencies"]["receptors"] %}, double {{inline.variable_name}}{% endfor %} + {% for inline in synapse_info["Dependencies"]["channels"] %}, double {{inline.variable_name}}{% endfor %}) +{ + // get spikes + double s_val = {{synapse_info["buffer_name"]}}_->get_value( lag ); // * g_norm_; + + //update ODE state variable + {% if synapse_info["ODEs"].items()|length %} double {{ printer_no_origin.print(synapse_info["time_resolution_var"]) }} = Time::get_resolution().get_ms(); {% endif %} + {%- for ode_variable, ode_info in synapse_info["ODEs"].items() %} + {%- for propagator, propagator_info in ode_info["transformed_solutions"][0]["propagators"].items() %} + double {{ propagator }} = {{ printer_no_origin.print(propagator_info["init_expression"]) }}; + {%- endfor %} + {%- for state, state_solution_info in ode_info["transformed_solutions"][0]["states"].items() %} + {{state}} = {{ printer_no_origin.print(state_solution_info["update_expression"]) }}; + {%- endfor %} + {%- endfor %} + + // update kernel state variable / compute synaptic conductance + {%- for convolution, convolution_info in synapse_info["convolutions"].items() %} + {%- for state_variable_name, state_variable_info in convolution_info["analytic_solution"]["kernel_states"].items() %} + {{state_variable_name}} = {{ printer_no_origin.print(state_variable_info["update_expression"]) }}; + {{state_variable_name}} += s_val * {{ printer_no_origin.print(state_variable_info["init_expression"]) }}; + + {%- endfor %} + {%- endfor %} + + // total current + // this expression should be the transformed inline expression + this->i_tot_{{synapse_name}} = {{ printer_no_origin.print(synapse_info["root_expression"].get_expression()) }}; + + // derivative of that expression + // voltage derivative of total current + // compute derivative with respect to current with sympy + double d_i_tot_dv = {{ printer_no_origin.print(synapse_info["inline_expression_d"]) }}; + + // for numerical integration + double g_val = - d_i_tot_dv / 2.; + double i_val = this->i_tot_{{synapse_name}} - d_i_tot_dv * v_comp / 2.; + + return std::make_pair(g_val, i_val); + +} + +{%- for function in synapse_info["functions_used"] %} +{{ function_declaration.FunctionDeclaration(function, "nest::"~synapse_name~cm_unique_suffix~"::") }} +{ +{%- 
filter indent(2,True) %} +{%- with ast = function.get_block() %} +{%- include "directives_cpp/Block.jinja2" %} +{%- endwith %} +{%- endfilter %} +} +{%- endfor %} + + double nest::{{synapse_name}}{{cm_unique_suffix}}::get_current_{{synapse_name}}(){ + return this->i_tot_{{synapse_name}}; + } + +// {{synapse_name}} synapse end /////////////////////////////////////////////////////////// +{%- endfor %} + +//////////////////////////////// concentrations +{%- for concentration_name, concentration_info in conc_info.items() %} + +// {{ concentration_name }} concentration ////////////////////////////////////////////////////////////////// +nest::{{ concentration_name }}{{cm_unique_suffix}}::{{ concentration_name }}{{cm_unique_suffix}}(): +{%- set states_written = False %} +{%- for pure_variable_name, variable_info in concentration_info["States"].items() %} +// state variable {{pure_variable_name -}} +{%- set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +{% if loop.first %} {%- set states_written = True %} {% else %}, {% endif %} +{{- variable.name}}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} + +{% for variable_type, variable_info in concentration_info["Parameters"].items() %} +// channel parameter {{variable_type -}} +{%- set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +{% if loop.first %} {% if states_written %}, {% endif %} {% else %}, {% endif %} +{{- variable.name }}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} +{} + +nest::{{ concentration_name }}{{cm_unique_suffix}}::{{ concentration_name }}{{cm_unique_suffix}}(const DictionaryDatum& concentration_params): +{%- set states_written = False %} +{%- for pure_variable_name, variable_info in concentration_info["States"].items() %} +// state variable {{pure_variable_name -}} +{%- set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +{% if loop.first %} {%- set states_written = True %} {% else %}, {% endif %} +{{- variable.name}}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} + +{% for variable_type, variable_info in concentration_info["Parameters"].items() %} +// channel parameter {{variable_type -}} +{%- set variable = variable_info["ASTVariable"] %} +{%- set rhs_expression = variable_info["rhs_expression"] %} +{% if loop.first %} {% if states_written %}, {% endif %} {% else %}, {% endif %} +{{- variable.name }}({{ printer_no_origin.print(rhs_expression) -}}) +{%- endfor %} +// update {{ concentration_name }} concentration parameters +{ + {%- for variable_type, variable_info in concentration_info["Parameters"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set dynamic_variable = render_dynamic_channel_variable_name(variable_type, concentration_name) %} //have to remove??????????? 
+ // {{ concentration_name }} concentration parameter {{dynamic_variable }} + if( concentration_params->known( "{{variable.name}}" ) ) + {{variable.name}} = getValue< double >( concentration_params, "{{variable.name}}" ); + {%- endfor %} +} + +void +nest::{{ concentration_name }}{{cm_unique_suffix}}::append_recordables(std::map< Name, double* >* recordables, + const long compartment_idx) +{ + // add state variables to recordables map + {%- for pure_variable_name, variable_info in concentration_info["States"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + ( *recordables )[ Name( "{{variable.name}}" + std::to_string(compartment_idx) )] = &{{variable.name}}; + {%- endfor %} + ( *recordables )[ Name( "{{concentration_name}}" + std::to_string(compartment_idx) )] = &{{concentration_name}}; +} + +void nest::{{ concentration_name }}{{cm_unique_suffix}}::f_numstep(const double v_comp{% for ode in concentration_info["Dependencies"]["concentrations"] %}, double {{ode.lhs.name}}{% endfor %} + {% for inline in concentration_info["Dependencies"]["receptors"] %}, double {{inline.variable_name}}{% endfor %} + {% for inline in concentration_info["Dependencies"]["channels"] %}, double {{inline.variable_name}}{% endfor %}) +{ + if({%- for key_zero_param in concentration_info["RootInlineKeyZeros"] %} {{ key_zero_param }} > 1e-9 && {%- endfor %} true ){ + double {{ printer_no_origin.print(concentration_info["time_resolution_var"]) }} = Time::get_resolution().get_ms(); + + {%- for ode_variable, ode_info in concentration_info["ODEs"].items() %} + {%- for propagator, propagator_info in ode_info["transformed_solutions"][0]["propagators"].items() %} + double {{ propagator }} = {{ printer_no_origin.print(propagator_info["init_expression"]) }}; + {%- endfor %} + {%- for state, state_solution_info in ode_info["transformed_solutions"][0]["states"].items() %} + {{state}} = {{ printer_no_origin.print(state_solution_info["update_expression"]) }}; + {%- endfor %} + {%- endfor %} + } +} + +{%- for function in concentration_info["Functions"] %} +{{render_channel_function(function, concentration_name)}} +{%- endfor %} + +double nest::{{concentration_name}}{{cm_unique_suffix}}::get_concentration_{{concentration_name}}(){ + return this->{{concentration_name}}; +} + +// {{concentration_name}} concentration end /////////////////////////////////////////////////////////// +{% endfor %} diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.h.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.h.jinja2 new file mode 100644 index 000000000..508d3331b --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_compartmentcurrents_@NEURON_NAME@.h.jinja2 @@ -0,0 +1,470 @@ +{#- +cm_compartmentcurrents_@NEURON_NAME@.h.jinja2 + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+#} +{%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} +{%- import 'directives_cpp/FunctionDeclaration.jinja2' as function_declaration with context %} +#ifndef SYNAPSES_NEAT_H_{{cm_unique_suffix | upper }} +#define SYNAPSES_NEAT_H_{{cm_unique_suffix | upper }} + +#include + +#include "ring_buffer.h" + +{% macro render_variable_type(variable) %} +{%- with %} + {%- set symbol = variable.get_scope().resolve_to_symbol(variable.name, SymbolKind.VARIABLE) %} + {{ types_printer.print(symbol.type_symbol) }} +{%- endwith %} +{%- endmacro %} + +namespace nest +{ + +{%- for ion_channel_name, channel_info in chan_info.items() %} + +class {{ion_channel_name}}{{cm_unique_suffix}}{ +private: + // states + {%- for pure_variable_name, variable_info in channel_info["States"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set rhs_expression = variable_info["rhs_expression"] %} + {{ render_variable_type(variable) }} {{ variable.name }} = {{ printer_no_origin.print(rhs_expression) }}; + {%- endfor %} + + // parameters + {%- for pure_variable_name, variable_info in channel_info["Parameters"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set rhs_expression = variable_info["rhs_expression"] %} + {{ render_variable_type(variable) }} {{ variable.name }} = {{ printer_no_origin.print(rhs_expression) }}; + {%- endfor %} + + // ion-channel root-inline value + double i_tot_{{ion_channel_name}} = 0; + +public: + // constructor, destructor + {{ion_channel_name}}{{cm_unique_suffix}}(); + {{ion_channel_name}}{{cm_unique_suffix}}(const DictionaryDatum& channel_params); + ~{{ion_channel_name}}{{cm_unique_suffix}}(){}; + + // initialization channel +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + void calibrate() { +{%- else %} + void pre_run_hook() { +{%- endif %} + // states + {%- for pure_variable_name, variable_info in channel_info["States"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set rhs_expression = variable_info["rhs_expression"] %} + {{ variable.name }} = {{ printer_no_origin.print(rhs_expression) }}; + {%- endfor %} + }; + void append_recordables(std::map< Name, double* >* recordables, + const long compartment_idx); + + // numerical integration step + std::pair< double, double > f_numstep( const double v_comp{% for ode in channel_info["Dependencies"]["concentrations"] %}, double {{ode.lhs.name}}{% endfor %}{% if channel_info["Dependencies"]["receptors"]|length %} + {% endif %}{% for inline in channel_info["Dependencies"]["receptors"] %}, double {{inline.variable_name}}{% endfor %}{% if channel_info["Dependencies"]["channels"]|length %} + {% endif %}{% for inline in channel_info["Dependencies"]["channels"] %}, double {{inline.variable_name}}{% endfor %}); + + // function declarations + +{%- for function in channel_info["Functions"] %} + {{ function_declaration.FunctionDeclaration(function) }}; +{%- endfor %} + + // root_inline getter + double get_current_{{ion_channel_name}}(); + +}; +{% endfor %} + + +////////////////////////////////////////////////// synapses + +{% macro render_time_resolution_variable(synapse_info) %} +{# we assume here that there is only one such variable ! 
#} +{%- for analytic_helper_name, analytic_helper_info in synapse_info["analytic_helpers"].items() %} +{%- if analytic_helper_info["is_time_resolution"] %} + {{ analytic_helper_name }} +{%- endif %} +{%- endfor %} +{%- endmacro %} + +{%- for synapse_name, synapse_info in syns_info.items() %} + +class {{synapse_name}}{{cm_unique_suffix}}{ +private: + // global synapse index + long syn_idx = 0; + + // propagators, initialized via pre_run_hook() or calibrate() + {%- for convolution, convolution_info in synapse_info["convolutions"].items() %} + {%- for state_variable_name, state_variable_info in convolution_info["analytic_solution"]["propagators"].items()%} + double {{state_variable_name}}; + {%- endfor %} + {%- endfor %} + + // kernel state variables, initialized via pre_run_hook() or calibrate() + {%- for convolution, convolution_info in synapse_info["convolutions"].items() %} + {%- for state_variable_name, state_variable_info in convolution_info["analytic_solution"]["kernel_states"].items()%} + double {{state_variable_name}}; + {%- endfor %} + {%- endfor %} + + // user defined parameters, initialized via pre_run_hook() or calibrate() + {%- for param_name, param_declaration in synapse_info["Parameters"].items() %} + double {{param_name}}; + {%- endfor %} + + // states + {%- for pure_variable_name, variable_info in synapse_info["States"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set rhs_expression = variable_info["rhs_expression"] %} + {{ render_variable_type(variable) }} {{ variable.name }} = {{ printer_no_origin.print(rhs_expression) }}; + {%- endfor %} + double i_tot_{{synapse_name}} = 0; + + // user declared internals in order they were declared, initialized via pre_run_hook() or calibrate() + {%- for internal_name, internal_declaration in synapse_info["internals_used_declared"] %} + double {{internal_name}}; + {%- endfor %} + + + + // spike buffer + RingBuffer* {{synapse_info["buffer_name"]}}_; + +public: + // constructor, destructor + {{synapse_name}}{{cm_unique_suffix}}( const long syn_index); + {{synapse_name}}{{cm_unique_suffix}}( const long syn_index, const DictionaryDatum& receptor_params); + ~{{synapse_name}}{{cm_unique_suffix}}(){}; + + long + get_syn_idx() + { + return syn_idx; + }; + + // numerical integration step + std::pair< double, double > f_numstep( const double v_comp, const long lag {% for ode in synapse_info["Dependencies"]["concentrations"] %}, double {{ode.lhs.name}}{% endfor %}{% if synapse_info["Dependencies"]["receptors"]|length %} + {% endif %}{% for inline in synapse_info["Dependencies"]["receptors"] %}, double {{inline.variable_name}}{% endfor %}{% if synapse_info["Dependencies"]["channels"]|length %} + {% endif %}{% for inline in synapse_info["Dependencies"]["channels"] %}, double {{inline.variable_name}}{% endfor %}); + + // calibration +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + void calibrate(); +{%- else %} + void pre_run_hook(); +{%- endif %} + void append_recordables(std::map< Name, double* >* recordables); + void set_buffer_ptr( std::vector< RingBuffer >& syn_buffers ) + { + {{synapse_info["buffer_name"]}}_ = &syn_buffers[ syn_idx ]; + }; + + // function declarations + {%- for function in synapse_info["Functions"] %} + {{ function_declaration.FunctionDeclaration(function, "") -}}; + + {% endfor %} + + // root_inline getter + double get_current_{{synapse_name}}(); +}; + + +{% endfor %} + 
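A minimal sketch of where the g_val / i_val pair returned by the f_numstep() methods above comes from, assuming the trapezoidal time discretization used by the surrounding compartmental solver (the sketch is not itself emitted by the template; v_n denotes the compartment voltage at the current step, v_{n+1} at the next step):

  i(v_{n+1}) \approx i(v_n) + \frac{di}{dv}\Big|_{v_n} \, (v_{n+1} - v_n)

  \tfrac{1}{2}\bigl[ i(v_{n+1}) + i(v_n) \bigr] \approx \tfrac{1}{2}\frac{di}{dv}\Big|_{v_n} \, v_{n+1} + \Bigl[ i(v_n) - \tfrac{1}{2}\frac{di}{dv}\Big|_{v_n} \, v_n \Bigr]

The bracketed term is i_val. The coefficient of v_{n+1} is moved to the left-hand side of the matrix row assembled in construct_matrix_element(), which flips its sign; hence g_val = - d_i_tot_dv / 2 is accumulated into the diagonal entry gg while i_val is added to the right-hand side ff.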
+///////////////////////////////////////////// concentrations + +{%- for concentration_name, concentration_info in conc_info.items() %} + +class {{ concentration_name }}{{cm_unique_suffix}}{ +private: + // parameters + {%- for pure_variable_name, variable_info in concentration_info["Parameters"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set rhs_expression = variable_info["rhs_expression"] %} + {{ render_variable_type(variable) }} {{ variable.name }} = {{ printer_no_origin.print(rhs_expression) }}; + {%- endfor %} + + // states + {%- for pure_variable_name, variable_info in concentration_info["States"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set rhs_expression = variable_info["rhs_expression"] %} + {{ render_variable_type(variable) }} {{ variable.name }} = {{ printer_no_origin.print(rhs_expression) }}; + {%- endfor %} + + // concentration value (root-ode state) + double {{concentration_name}} = 0; + +public: + // constructor, destructor + {{ concentration_name }}{{cm_unique_suffix}}(); + {{ concentration_name }}{{cm_unique_suffix}}(const DictionaryDatum& concentration_params); + ~{{ concentration_name }}{{cm_unique_suffix}}(){}; + + // initialization channel +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + void calibrate() { +{%- else %} + void pre_run_hook() { +{%- endif %} + // states + {%- for pure_variable_name, variable_info in concentration_info["States"].items() %} + {%- set variable = variable_info["ASTVariable"] %} + {%- set rhs_expression = variable_info["rhs_expression"] %} + {{ variable.name }} = {{ printer_no_origin.print(rhs_expression) }}; + {%- endfor %} + }; + void append_recordables(std::map< Name, double* >* recordables, + const long compartment_idx); + + // numerical integration step + void f_numstep( const double v_comp{% for ode in concentration_info["Dependencies"]["concentrations"] %}, double {{ode.lhs.name}}{% endfor %}{% if concentration_info["Dependencies"]["receptors"]|length %} + {% endif %}{% for inline in concentration_info["Dependencies"]["receptors"] %}, double {{inline.variable_name}}{% endfor %}{% if concentration_info["Dependencies"]["channels"]|length %} + {% endif %}{% for inline in concentration_info["Dependencies"]["channels"] %}, double {{inline.variable_name}}{% endfor %}); + + // function declarations +{%- for function in concentration_info["Functions"] %} + {{ function_declaration.FunctionDeclaration(function) }}; +{%- endfor %} + + // root_ode getter + double get_concentration_{{concentration_name}}(); + +}; +{% endfor %} + +///////////////////////////////////////////// currents + +{%- set channel_suffix = "_chan_" %} +{%- set concentration_suffix = "_conc_" %} + +class CompartmentCurrents{{cm_unique_suffix}} { +private: + // ion channels +{% with %} + {%- for ion_channel_name, channel_info in chan_info.items() %} + {{ion_channel_name}}{{cm_unique_suffix}} {{ion_channel_name}}{{channel_suffix}}; + {% endfor %} +{% endwith %} + + // synapses + {%- for synapse_name, synapse_info in syns_info.items() %} + std::vector < {{synapse_name}}{{cm_unique_suffix}} > {{synapse_name}}_syns_; + {% endfor %} + + //concentrations +{% with %} + {%- for concentration_name, concentration_info in conc_info.items() %} + {{concentration_name}}{{cm_unique_suffix}} {{concentration_name}}{{concentration_suffix}}; + {% endfor %} +{% endwith %} + +public: + CompartmentCurrents{{cm_unique_suffix}}(){}; + explicit 
CompartmentCurrents{{cm_unique_suffix}}(const DictionaryDatum& compartment_params) + { + {%- for ion_channel_name, channel_info in chan_info.items() %} + {{ion_channel_name}}{{channel_suffix}} = {{ion_channel_name}}{{cm_unique_suffix}}( compartment_params ); + {% endfor %} + + {%- for concentration_name, concentration_info in conc_info.items() %} + {{ concentration_name }}{{concentration_suffix}} = {{ concentration_name }}{{cm_unique_suffix}}( compartment_params ); + {% endfor %} + }; + ~CompartmentCurrents{{cm_unique_suffix}}(){}; + +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + void calibrate() { +{%- else %} + void pre_run_hook() { +{%- endif %} + // initialization of ion channels + {%- for ion_channel_name, channel_info in chan_info.items() %} +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + {{ion_channel_name}}{{channel_suffix}}.calibrate(); +{%- else %} + {{ion_channel_name}}{{channel_suffix}}.pre_run_hook(); +{%- endif %} + {% endfor %} + + // initialization of concentrations + {%- for concentration_name, concentration_info in conc_info.items() %} +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + {{ concentration_name }}{{concentration_suffix}}.calibrate(); +{%- else %} + {{ concentration_name }}{{concentration_suffix}}.pre_run_hook(); +{%- endif %} + {% endfor %} + + // initialization of synapses + {%- for synapse_name, synapse_info in syns_info.items() %} + // initialization of {{synapse_name}} synapses + for( auto syn_it = {{synapse_name}}_syns_.begin(); + syn_it != {{synapse_name}}_syns_.end(); + ++syn_it ) + { +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + syn_it->calibrate(); +{%- else %} + syn_it->pre_run_hook(); +{%- endif %} + } + {% endfor %} + }; + + void add_synapse( const std::string& type, const long syn_idx ) + { + {%- for synapse_name, synapse_info in syns_info.items() %} + {% if not loop.first %}else{% endif %} if ( type == "{{synapse_name}}" ) + { + {{synapse_name}}_syns_.push_back( {{synapse_name}}{{cm_unique_suffix}}( syn_idx ) ); + } + {% endfor %} + else + { + assert( false ); + } + }; + void add_synapse( const std::string& type, const long syn_idx, const DictionaryDatum& receptor_params ) + { + {%- for synapse_name, synapse_info in syns_info.items() %} + {% if not loop.first %}else{% endif %} if ( type == "{{synapse_name}}" ) + { + {{synapse_name}}_syns_.push_back( {{synapse_name}}{{cm_unique_suffix}}( syn_idx, receptor_params ) ); + } + {% endfor %} + else + { + assert( false ); + } + }; + + void + add_receptor_info( ArrayDatum& ad, const long compartment_index ) + { + {%- for synapse_name, synapse_info in syns_info.items() %} + for( auto syn_it = {{synapse_name}}_syns_.begin(); syn_it != {{synapse_name}}_syns_.end(); syn_it++) + { + DictionaryDatum dd = DictionaryDatum( new Dictionary ); + def< long >( dd, names::receptor_idx, syn_it->get_syn_idx() ); + def< long >( dd, names::comp_idx, compartment_index ); + def< std::string >( dd, names::receptor_type, "{{synapse_name}}" ); + ad.push_back( dd ); + } + {% endfor %} + }; + + void + set_syn_buffers( std::vector< RingBuffer >& syn_buffers ) + { + // spike buffers for synapses + {%- for synapse_name, synapse_info in 
syns_info.items() %} + for( auto syn_it = {{synapse_name}}_syns_.begin(); syn_it != {{synapse_name}}_syns_.end(); syn_it++) + syn_it->set_buffer_ptr( syn_buffers ); + {% endfor %} + }; + + std::map< Name, double* > + get_recordables( const long compartment_idx ) + { + std::map< Name, double* > recordables; + + // append ion channel state variables to recordables + {%- for ion_channel_name, channel_info in chan_info.items() %} + {{ion_channel_name}}{{channel_suffix}}.append_recordables( &recordables, compartment_idx ); + {% endfor %} + + // append concentration state variables to recordables + {%- for concentration_name, concentration_info in conc_info.items() %} + {{concentration_name}}{{concentration_suffix}}.append_recordables( &recordables, compartment_idx ); + {% endfor %} + + // append synapse state variables to recordables + {%- for synapse_name, synapse_info in syns_info.items() %} + for( auto syn_it = {{synapse_name}}_syns_.begin(); syn_it != {{synapse_name}}_syns_.end(); syn_it++) + syn_it->append_recordables( &recordables ); + {% endfor %} + + return recordables; + }; + + std::pair< double, double > + f_numstep( const double v_comp, const long lag ) + { + std::pair< double, double > gi(0., 0.); + double g_val = 0.; + double i_val = 0.; +{%- for synapse_name, synapse_info in syns_info.items() %} + double {{synapse_name}}{{channel_suffix}}current_sum = 0; + for( auto syn_it = {{synapse_name}}_syns_.begin(); + syn_it != {{synapse_name}}_syns_.end(); + ++syn_it ) + { + {{synapse_name}}{{channel_suffix}}current_sum += syn_it->get_current_{{synapse_name}}(); + } +{% endfor %} + + {%- for concentration_name, concentration_info in conc_info.items() %} + // computation of {{ concentration_name }} concentration + {{ concentration_name }}{{concentration_suffix}}.f_numstep( v_comp{% for ode in concentration_info["Dependencies"]["concentrations"] %}, {{ode.lhs.name}}{{concentration_suffix}}.get_concentration_{{ode.lhs.name}}(){% endfor %}{% if concentration_info["Dependencies"]["receptors"]|length %} + {% endif %}{% for inline in concentration_info["Dependencies"]["receptors"] %}, {{inline.variable_name}}{{channel_suffix}}_current_sum{% endfor %}{% if concentration_info["Dependencies"]["channels"]|length %} + {% endif %}{% for inline in concentration_info["Dependencies"]["channels"] %}, {{inline.variable_name}}{{channel_suffix}}.get_current_{{inline.variable_name}}(){% endfor %}); + + {% endfor %} + + {%- for ion_channel_name, channel_info in chan_info.items() %} + // contribution of {{ion_channel_name}} channel + gi = {{ion_channel_name}}{{channel_suffix}}.f_numstep( v_comp{% for ode in channel_info["Dependencies"]["concentrations"] %}, {{ode.lhs.name}}{{concentration_suffix}}.get_concentration_{{ode.lhs.name}}(){% endfor %}{% if channel_info["Dependencies"]["receptors"]|length %} + {% endif %}{% for inline in channel_info["Dependencies"]["receptors"] %}, {{inline.variable_name}}{{channel_suffix}}_current_sum{% endfor %}{% if channel_info["Dependencies"]["channels"]|length %} + {% endif %}{% for inline in channel_info["Dependencies"]["channels"] %}, {{inline.variable_name}}{{channel_suffix}}.get_current_{{inline.variable_name}}(){% endfor %}); + + g_val += gi.first; + i_val += gi.second; + + {% endfor %} + + {%- for synapse_name, synapse_info in syns_info.items() %} + // contribution of {{synapse_name}} synapses + for( auto syn_it = {{synapse_name}}_syns_.begin(); + syn_it != {{synapse_name}}_syns_.end(); + ++syn_it ) + { + gi = syn_it->f_numstep( v_comp, lag {% for ode in 
synapse_info["Dependencies"]["concentrations"] %}, {{ode.lhs.name}}{{concentration_suffix}}.get_concentration_{{ode.lhs.name}}(){% endfor %}{% if synapse_info["Dependencies"]["receptors"]|length %} + {% endif %}{% for inline in synapse_info["Dependencies"]["receptors"] %}, {{inline.variable_name}}{{channel_suffix}}_current_sum{% endfor %}{% if synapse_info["Dependencies"]["channels"]|length %} + {% endif %}{% for inline in synapse_info["Dependencies"]["channels"] %}, {{inline.variable_name}}{{channel_suffix}}.get_current{{inline.variable_name}}(){% endfor %}); + + g_val += gi.first; + i_val += gi.second; + } + {% endfor %} + + return std::make_pair(g_val, i_val); + }; +}; + +} // namespace + +#endif /* #ifndef SYNAPSES_NEAT_H_{{cm_unique_suffix | upper }} */ diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.cpp.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.cpp.jinja2 new file mode 100644 index 000000000..38bf6d446 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.cpp.jinja2 @@ -0,0 +1,515 @@ +/* + * cm_tree.cpp + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * + */ +#include "{{neuronSpecificFileNamesCmSyns["tree"]}}.h" + + +nest::Compartment{{cm_unique_suffix}}::Compartment{{cm_unique_suffix}}( const long compartment_index, const long parent_index ) + : xx_( 0.0 ) + , yy_( 0.0 ) + , comp_index( compartment_index ) + , p_index( parent_index ) + , parent( nullptr ) + , v_comp( 0.0 ) + , ca( 1.0 ) + , gc( 0.01 ) + , gl( 0.1 ) + , el( -70. ) + , gg0( 0.0 ) + , ca__div__dt( 0.0 ) + , gl__div__2( 0.0 ) + , gc__div__2( 0.0 ) + , gl__times__el( 0.0 ) + , ff( 0.0 ) + , gg( 0.0 ) + , hh( 0.0 ) + , n_passed( 0 ) +{ + v_comp = el; + + compartment_currents = CompartmentCurrents{{cm_unique_suffix}}(); +} +nest::Compartment{{cm_unique_suffix}}::Compartment{{cm_unique_suffix}}( const long compartment_index, + const long parent_index, + const DictionaryDatum& compartment_params ) + : xx_( 0.0 ) + , yy_( 0.0 ) + , comp_index( compartment_index ) + , p_index( parent_index ) + , parent( nullptr ) + , v_comp( 0.0 ) + , ca( 1.0 ) + , gc( 0.01 ) + , gl( 0.1 ) + , el( -70. 
) + , gg0( 0.0 ) + , ca__div__dt( 0.0 ) + , gl__div__2( 0.0 ) + , gc__div__2( 0.0 ) + , gl__times__el( 0.0 ) + , ff( 0.0 ) + , gg( 0.0 ) + , hh( 0.0 ) + , n_passed( 0 ) +{ + + updateValue< double >( compartment_params, names::C_m, ca ); + updateValue< double >( compartment_params, names::g_C, gc ); + updateValue< double >( compartment_params, names::g_L, gl ); + updateValue< double >( compartment_params, names::e_L, el ); + + v_comp = el; + + compartment_currents = CompartmentCurrents{{cm_unique_suffix}}( compartment_params ); +} + +void +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} +nest::Compartment{{cm_unique_suffix}}::calibrate() +{%- else %} +nest::Compartment{{cm_unique_suffix}}::pre_run_hook() +{%- endif %} +{ +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + compartment_currents.calibrate(); +{%- else %} + compartment_currents.pre_run_hook(); +{%- endif %} + + const double dt = Time::get_resolution().get_ms(); + ca__div__dt = ca / dt; + gl__div__2 = gl / 2.; + gg0 = ca__div__dt + gl__div__2; + gc__div__2 = gc / 2.; + gl__times__el = gl * el; + + // initialize the buffer + currents.clear(); +} + +std::map< Name, double* > +nest::Compartment{{cm_unique_suffix}}::get_recordables() +{ + std::map< Name, double* > recordables = compartment_currents.get_recordables( comp_index ); + + recordables.insert( recordables.begin(), recordables.end() ); + recordables[ Name( "v_comp" + std::to_string( comp_index ) ) ] = &v_comp; + + return recordables; +} + +// for matrix construction +void +nest::Compartment{{cm_unique_suffix}}::construct_matrix_element( const long lag ) +{ + // matrix diagonal element + gg = gg0; + + if ( parent != nullptr ) + { + gg += gc__div__2; + // matrix off diagonal element + hh = -gc__div__2; + } + + for ( auto child_it = children.begin(); child_it != children.end(); ++child_it ) + { + gg += ( *child_it ).gc__div__2; + } + + // right hand side + ff = ( ca__div__dt - gl__div__2 ) * v_comp + gl__times__el; + + if ( parent != nullptr ) + { + ff -= gc__div__2 * ( v_comp - parent->v_comp ); + } + + for ( auto child_it = children.begin(); child_it != children.end(); ++child_it ) + { + ff -= ( *child_it ).gc__div__2 * ( v_comp - ( *child_it ).v_comp ); + } + + // add all currents to compartment + std::pair< double, double > gi = compartment_currents.f_numstep( v_comp, lag ); + gg += gi.first; + ff += gi.second; + + // add input current + ff += currents.get_value( lag ); +} + + +nest::CompTree{{cm_unique_suffix}}::CompTree{{cm_unique_suffix}}() + : root_( -1, -1 ) + , size_( 0 ) +{ + compartments_.resize( 0 ); + leafs_.resize( 0 ); +} + +/** + * Add a compartment to the tree structure via the Python interface + * root should have -1 as parent index. Add root compartment first.
+ * Assumes parent of compartment is already added + */ +void +nest::CompTree{{cm_unique_suffix}}::add_compartment( const long parent_index ) +{ + Compartment{{cm_unique_suffix}}* compartment = new Compartment{{cm_unique_suffix}}( size_, parent_index ); + add_compartment( compartment, parent_index ); +} + +void +nest::CompTree{{cm_unique_suffix}}::add_compartment( const long parent_index, const DictionaryDatum& compartment_params ) +{ + Compartment{{cm_unique_suffix}}* compartment = new Compartment{{cm_unique_suffix}}( size_, parent_index, compartment_params ); + add_compartment( compartment, parent_index ); +} + +void +nest::CompTree{{cm_unique_suffix}}::add_compartment( Compartment{{cm_unique_suffix}}* compartment, const long parent_index ) +{ + size_++; + + if ( parent_index >= 0 ) + { + /** + * we do not raise an UnknownCompartment exception from within + * get_compartment(), because we want to print a more informative + * exception message + */ + Compartment{{cm_unique_suffix}}* parent = get_compartment( parent_index, get_root(), 0 ); + if ( parent == nullptr ) + { + std::string msg = "does not exist in tree, but was specified as a parent compartment"; + throw UnknownCompartment( parent_index, msg ); + } + + parent->children.push_back( *compartment ); + } + else + { + // we raise an error if the root already exists + if ( root_.comp_index >= 0 ) + { + std::string msg = ", the root, has already been instantiated"; + throw UnknownCompartment( root_.comp_index, msg ); + } + root_ = *compartment; + } + + compartment_indices_.push_back( compartment->comp_index ); + + set_compartments(); +} + +/** + * Get the compartment corresponding to the provided index in the tree. + * + * This function gets the compartments by a recursive search through the tree. + * + * The overloaded function looks only in the subtree of the provided compartment, + * and also has the option to throw an error if no compartment corresponding to + * `compartment_index` is found in the tree. + */ +nest::Compartment{{cm_unique_suffix}}* +nest::CompTree{{cm_unique_suffix}}::get_compartment( const long compartment_index ) const +{ + return get_compartment( compartment_index, get_root(), 1 ); +} + +nest::Compartment{{cm_unique_suffix}}* +nest::CompTree{{cm_unique_suffix}}::get_compartment( const long compartment_index, Compartment{{cm_unique_suffix}}* compartment, const long raise_flag ) const +{ + Compartment{{cm_unique_suffix}}* r_compartment = nullptr; + + if ( compartment->comp_index == compartment_index ) + { + r_compartment = compartment; + } + else + { + auto child_it = compartment->children.begin(); + while ( ( not r_compartment ) && child_it != compartment->children.end() ) + { + r_compartment = get_compartment( compartment_index, &( *child_it ), 0 ); + ++child_it; + } + } + + if ( ( not r_compartment ) && raise_flag ) + { + std::string msg = "does not exist in tree"; + throw UnknownCompartment( compartment_index, msg ); + } + + return r_compartment; +} + +/** + * Get the compartment corresponding to the provided index in the tree. Optimized + * through the use of a pointer vector containing all compartments.
Calling this + * function before CompTree{{cm_unique_suffix}}::init_pointers() is called will result in a segmentation + * fault + */ +nest::Compartment{{cm_unique_suffix}}* +nest::CompTree{{cm_unique_suffix}}::get_compartment_opt( const long compartment_idx ) const +{ + return compartments_[ compartment_idx ]; +} + +/** + * Initialize all tree structure pointers + */ +void +nest::CompTree{{cm_unique_suffix}}::init_pointers() +{ + set_parents(); + set_compartments(); + set_leafs(); +} + +/** + * For each compartments, sets its pointer towards its parent compartment + */ +void +nest::CompTree{{cm_unique_suffix}}::set_parents() +{ + for ( auto compartment_idx_it = compartment_indices_.begin(); compartment_idx_it != compartment_indices_.end(); + ++compartment_idx_it ) + { + Compartment{{cm_unique_suffix}}* comp_ptr = get_compartment( *compartment_idx_it ); + // will be nullptr if root + Compartment{{cm_unique_suffix}}* parent_ptr = get_compartment( comp_ptr->p_index, &root_, 0 ); + comp_ptr->parent = parent_ptr; + } +} + +/** + * Creates a vector of compartment pointers, organized in the order in which they were + * added by `add_compartment()` + */ +void +nest::CompTree{{cm_unique_suffix}}::set_compartments() +{ + compartments_.clear(); + + for ( auto compartment_idx_it = compartment_indices_.begin(); compartment_idx_it != compartment_indices_.end(); + ++compartment_idx_it ) + { + compartments_.push_back( get_compartment( *compartment_idx_it ) ); + } +} + +/** + * Creates a vector of compartment pointers of compartments that are also leafs of the tree. + */ +void +nest::CompTree{{cm_unique_suffix}}::set_leafs() +{ + leafs_.clear(); + for ( auto compartment_it = compartments_.begin(); compartment_it != compartments_.end(); ++compartment_it ) + { + if ( int( ( *compartment_it )->children.size() ) == 0 ) + { + leafs_.push_back( *compartment_it ); + } + } +} + +/** + * Initializes pointers for the spike buffers for all synapse receptors + */ +void +nest::CompTree{{cm_unique_suffix}}::set_syn_buffers( std::vector< RingBuffer >& syn_buffers ) +{ + for ( auto compartment_it = compartments_.begin(); compartment_it != compartments_.end(); ++compartment_it ) + { + ( *compartment_it )->compartment_currents.set_syn_buffers( syn_buffers ); + } +} + +/** + * Returns a map of variable names and pointers to the recordables + */ +std::map< Name, double* > +nest::CompTree{{cm_unique_suffix}}::get_recordables() +{ + std::map< Name, double* > recordables; + + /** + * add recordables for all compartments, suffixed by compartment_idx, + * to "recordables" + */ + for ( auto compartment_it = compartments_.begin(); compartment_it != compartments_.end(); ++compartment_it ) + { + std::map< Name, double* > recordables_comp = ( *compartment_it )->get_recordables(); + recordables.insert( recordables_comp.begin(), recordables_comp.end() ); + } + return recordables; +} + +/** + * Initialize state variables + */ +void +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} +nest::CompTree{{cm_unique_suffix}}::calibrate() +{%- else %} +nest::CompTree{{cm_unique_suffix}}::pre_run_hook() +{%- endif %} +{ + if ( root_.comp_index < 0 ) + { + std::string msg = "does not exist in tree, meaning that no compartments have been added"; + throw UnknownCompartment( 0, msg ); + } + + // initialize the compartments + for ( auto compartment_it = compartments_.begin(); compartment_it != compartments_.end(); ++compartment_it ) + { +{%- if 
nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + ( *compartment_it )->calibrate(); +{%- else %} + ( *compartment_it )->pre_run_hook(); +{%- endif %} + } +} + +/** + * Returns vector of voltage values, indices correspond to compartments in `compartments_` + */ +std::vector< double > +nest::CompTree{{cm_unique_suffix}}::get_voltage() const +{ + std::vector< double > v_comps; + for ( auto compartment_it = compartments_.cbegin(); compartment_it != compartments_.cend(); ++compartment_it ) + { + v_comps.push_back( ( *compartment_it )->v_comp ); + } + return v_comps; +} + +/** + * Return voltage of single compartment voltage, indicated by the compartment_index + */ +double +nest::CompTree{{cm_unique_suffix}}::get_compartment_voltage( const long compartment_index ) +{ + return compartments_[ compartment_index ]->v_comp; +} + +/** + * Construct the matrix equation to be solved to advance the model one timestep + */ +void +nest::CompTree{{cm_unique_suffix}}::construct_matrix( const long lag ) +{ + for ( auto compartment_it = compartments_.begin(); compartment_it != compartments_.end(); ++compartment_it ) + { + ( *compartment_it )->construct_matrix_element( lag ); + } +} + +/** + * Solve matrix with O(n) algorithm + */ +void +nest::CompTree{{cm_unique_suffix}}::solve_matrix() +{ + std::vector< Compartment{{cm_unique_suffix}}* >::iterator leaf_it = leafs_.begin(); + + // start the down sweep (puts to zero the sub diagonal matrix elements) + solve_matrix_downsweep( leafs_[ 0 ], leaf_it ); + + // do up sweep to set voltages + solve_matrix_upsweep( &root_, 0.0 ); +} + +void +nest::CompTree{{cm_unique_suffix}}::solve_matrix_downsweep( Compartment{{cm_unique_suffix}}* compartment, std::vector< Compartment{{cm_unique_suffix}}* >::iterator leaf_it ) +{ + // compute the input output transformation at compartment + std::pair< double, double > output = compartment->io(); + + // move on to the parent layer + if ( compartment->parent != nullptr ) + { + Compartment{{cm_unique_suffix}}* parent = compartment->parent; + // gather input from child layers + parent->gather_input( output ); + // move on to next compartments + ++parent->n_passed; + if ( parent->n_passed == int( parent->children.size() ) ) + { + parent->n_passed = 0; + // move on to next compartment + solve_matrix_downsweep( parent, leaf_it ); + } + else + { + // start at next leaf + ++leaf_it; + if ( leaf_it != leafs_.end() ) + { + solve_matrix_downsweep( *leaf_it, leaf_it ); + } + } + } +} + +void +nest::CompTree{{cm_unique_suffix}}::solve_matrix_upsweep( Compartment{{cm_unique_suffix}}* compartment, double vv ) +{ + // compute compartment voltage + vv = compartment->calc_v( vv ); + // move on to child compartments + for ( auto child_it = compartment->children.begin(); child_it != compartment->children.end(); ++child_it ) + { + solve_matrix_upsweep( &( *child_it ), vv ); + } +} + +/** + * Print the tree graph + */ +void +nest::CompTree{{cm_unique_suffix}}::print_tree() const +{ + // loop over all compartments + std::printf( ">>> CM tree with %d compartments <<<\n", int( compartments_.size() ) ); + for ( int ii = 0; ii < int( compartments_.size() ); ++ii ) + { + Compartment{{cm_unique_suffix}}* compartment = compartments_[ ii ]; + std::cout << " Compartment{{cm_unique_suffix}} " << compartment->comp_index << ": "; + std::cout << "C_m = " << compartment->ca << " nF, "; + std::cout << "g_L = " << compartment->gl << " uS, "; + std::cout << "e_L = " << compartment->el 
<< " mV, "; + if ( compartment->parent != nullptr ) + { + std::cout << "Parent " << compartment->parent->comp_index << " --> "; + std::cout << "g_c = " << compartment->gc << " uS, "; + } + std::cout << std::endl; + } + std::cout << std::endl; +} diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.h.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.h.jinja2 new file mode 100644 index 000000000..fe1942c03 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/cm_tree_@NEURON_NAME@.h.jinja2 @@ -0,0 +1,250 @@ +{#- +cm_tree_@NEURON_NAME@.h.jinja2 + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +#} +{%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} +{%- import 'directives_cpp/SpikeBufferGetter.jinja2' as buffer_getter with context %} +{%- import 'directives_cpp/ContinuousInputBufferGetter.jinja2' as continuous_buffer_getter with context %} +{%- import 'directives_cpp/BufferDeclaration.jinja2' as buffer_declaration with context %} +{%- import 'directives_cpp/BufferDeclarationValue.jinja2' as buffer_declaration_value with context %} +{%- import 'directives_cpp/FunctionDeclaration.jinja2' as function_declaration with context %} +{%- import 'directives_cpp/OutputEvent.jinja2' as output_event with context %} +/* + * {{neuronSpecificFileNamesCmSyns["tree"]}}.h + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . 
+ * + */ + +#ifndef CM_TREE_{{cm_unique_suffix | upper }}_H +#define CM_TREE_{{cm_unique_suffix | upper }}_H + +#include + +#include "nest_time.h" +#include "ring_buffer.h" + +// compartmental model +#include "{{neuronSpecificFileNamesCmSyns["compartmentcurrents"]}}.h" + +// Includes from libnestutil: +#include "dict_util.h" +#include "numerics.h" + +// Includes from nestkernel: +#include "exceptions.h" +#include "kernel_manager.h" +#include "universal_data_logger_impl.h" + +// Includes from sli: +#include "dict.h" +#include "dictutils.h" + + +namespace nest +{ + +class Compartment{{cm_unique_suffix}} +{ +private: + // aggregators for numerical integration + double xx_; + double yy_; + +public: + // compartment index + long comp_index; + // parent compartment index + long p_index; + // tree structure indices + Compartment{{cm_unique_suffix}}* parent; + std::vector< Compartment{{cm_unique_suffix}} > children; + // vector for synapses + CompartmentCurrents{{cm_unique_suffix}} compartment_currents; + + // buffer for currents + RingBuffer currents; + // voltage variable + double v_comp; + // electrical parameters + double ca; // compartment capacitance [uF] + double gc; // coupling conductance with parent (meaningless if root) [uS] + double gl; // leak conductance of compartment [uS] + double el; // leak current reversal potential [mV] + // auxiliary variables for efficiency + double gg0; + double ca__div__dt; + double gl__div__2; + double gc__div__2; + double gl__times__el; + // for numerical integration + double ff; + double gg; + double hh; + // passage counter for recursion + int n_passed; + + // constructor, destructor + Compartment{{cm_unique_suffix}}( const long compartment_index, const long parent_index ); + Compartment{{cm_unique_suffix}}( const long compartment_index, const long parent_index, const DictionaryDatum& compartment_params ); + ~Compartment{{cm_unique_suffix}}(){}; + + // initialization +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + void calibrate(); +{%- else %} + void pre_run_hook(); +{%- endif %} + std::map< Name, double* > get_recordables(); + + // matrix construction + void construct_matrix_element( const long lag ); + + // matrix inversion + inline void gather_input( const std::pair< double, double >& in ); + inline std::pair< double, double > io(); + inline double calc_v( const double v_in ); +}; // Compartment + + +/* +Short helper functions for solving the matrix equation.
Can hopefully be inlined +*/ +inline void +nest::Compartment{{cm_unique_suffix}}::gather_input( const std::pair< double, double >& in ) +{ + xx_ += in.first; + yy_ += in.second; +} +inline std::pair< double, double > +nest::Compartment{{cm_unique_suffix}}::io() +{ + // include inputs from child compartments + gg -= xx_; + ff -= yy_; + + // output values + double g_val( hh * hh / gg ); + double f_val( ff * hh / gg ); + + return std::make_pair( g_val, f_val ); +} +inline double +nest::Compartment{{cm_unique_suffix}}::calc_v( const double v_in ) +{ + // reset recursion variables + xx_ = 0.0; + yy_ = 0.0; + + // compute voltage + v_comp = ( ff - v_in * hh ) / gg; + + return v_comp; +} + + +class CompTree{{cm_unique_suffix}} +{ +private: + /* + structural data containers for the compartment model + */ + mutable Compartment{{cm_unique_suffix}} root_; + std::vector< long > compartment_indices_; + std::vector< Compartment{{cm_unique_suffix}}* > compartments_; + std::vector< Compartment{{cm_unique_suffix}}* > leafs_; + + long size_ = 0; + + // recursion functions for matrix inversion + void solve_matrix_downsweep( Compartment{{cm_unique_suffix}}* compartment_ptr, std::vector< Compartment{{cm_unique_suffix}}* >::iterator leaf_it ); + void solve_matrix_upsweep( Compartment{{cm_unique_suffix}}* compartment, double vv ); + + // functions for pointer initialization + void set_parents(); + void set_compartments(); + void set_leafs(); + +public: + // constructor, destructor + CompTree{{cm_unique_suffix}}(); + ~CompTree{{cm_unique_suffix}}(){}; + + // initialization functions for tree structure + void add_compartment( const long parent_index ); + void add_compartment( const long parent_index, const DictionaryDatum& compartment_params ); + void add_compartment( Compartment{{cm_unique_suffix}}* compartment, const long parent_index ); +{%- if nest_version.startswith("v2") or nest_version.startswith("v3.1") or nest_version.startswith("v3.2") or nest_version.startswith("v3.3") %} + void calibrate(); +{%- else %} + void pre_run_hook(); +{%- endif %} + + void init_pointers(); + void set_syn_buffers( std::vector< RingBuffer >& syn_buffers ); + std::map< Name, double* > get_recordables(); + + // get a compartment pointer from the tree + Compartment{{cm_unique_suffix}}* get_compartment( const long compartment_index ) const; + Compartment{{cm_unique_suffix}}* get_compartment( const long compartment_index, Compartment{{cm_unique_suffix}}* compartment, const long raise_flag ) const; + Compartment{{cm_unique_suffix}}* get_compartment_opt( const long compartment_indx ) const; + Compartment{{cm_unique_suffix}}* + get_root() const + { + return &root_; + }; + + // get tree size (number of compartments) + long + get_size() const + { + return size_; + }; + + // get voltage values + std::vector< double > get_voltage() const; + double get_compartment_voltage( const long compartment_index ); + + // construct the numerical integration matrix and vector + void construct_matrix( const long lag ); + // solve the matrix equation for next timestep voltage + void solve_matrix(); + + // print function + void print_tree() const; +}; // CompTree + +} // namespace + +#endif /* #ifndef CM_TREE_{{cm_unique_suffix | upper }}_H */ diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/directives_cpp b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/directives_cpp new file mode 120000 index 000000000..311ba16de --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/directives_cpp 
@@ -0,0 +1 @@ +../../resources_nest/point_neuron/directives_cpp \ No newline at end of file diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.cpp.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.cpp.jinja2 new file mode 100644 index 000000000..da06e1041 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.cpp.jinja2 @@ -0,0 +1,126 @@ +{#/* +* @MODULE_NAME@.cpp.jinja2 +* +* This file is part of NEST. +* +* Copyright (C) 2004 The NEST Initiative +* +* NEST is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 2 of the License, or +* (at your option) any later version. +* +* NEST is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with NEST. If not, see . +* +*/#} +{%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} +/* +* {{moduleName}}.cpp +* +* This file is part of NEST. +* +* Copyright (C) 2004 The NEST Initiative +* +* NEST is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 2 of the License, or +* (at your option) any later version. +* +* NEST is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with NEST. If not, see . +* +* {{now}} +*/ + +// Includes from nestkernel: +#include "connection_manager_impl.h" +#include "connector_model_impl.h" +#include "dynamicloader.h" +#include "exceptions.h" +#include "genericmodel_impl.h" +#include "kernel_manager.h" +#include "model.h" +#include "model_manager_impl.h" +#include "nestmodule.h" +#include "target_identifier.h" + +// Includes from sli: +#include "booldatum.h" +#include "integerdatum.h" +#include "sliexceptions.h" +#include "tokenarray.h" + +// include headers with your own stuff +#include "{{moduleName}}.h" + +{% for neuron in neurons %} +#include "{{perNeuronFileNamesCm[neuron.get_name()]["main"]}}.h" +{% endfor %} + +// -- Interface to dynamic module loader --------------------------------------- + +/* +* There are three scenarios, in which MyModule can be loaded by NEST: +* +* 1) When loading your module with `Install`, the dynamic module loader must +* be able to find your module. You make the module known to the loader by +* defining an instance of your module class in global scope. (LTX_MODULE is +* defined) This instance must have the name +* +* _LTX_mod +* +* The dynamicloader can then load modulename and search for symbol "mod" in it. +* +* 2) When you link the library dynamically with NEST during compilation, a new +* object has to be created. In the constructor the DynamicLoaderModule will +* register your module. 
(LINKED_MODULE is defined) +* +* 3) When you link the library statically with NEST during compilation, the +* registration will take place in the file `static_modules.h`, which is +* generated by cmake. +*/ +#if defined(LTX_MODULE) | defined(LINKED_MODULE) +{{moduleName}} {{moduleName}}_LTX_mod; +#endif + +// -- DynModule functions ------------------------------------------------------ + +{{moduleName}}::{{moduleName}}() +{ +#ifdef LINKED_MODULE + // register this module at the dynamic loader + // this is needed to allow for linking in this module at compile time + // all registered modules will be initialized by the main app's dynamic loader + nest::DynamicLoaderModule::registerLinkedModule( this ); +#endif +} + +{{moduleName}}::~{{moduleName}}() +{ +} + +const std::string +{{moduleName}}::name() const +{ + return std::string("{{moduleName}}"); // Return name of the module +} + +//------------------------------------------------------------------------------------- +void +{{moduleName}}::init( SLIInterpreter* i ) +{ + {% for neuron in neurons %} +nest::kernel().model_manager.register_node_model< nest::{{ perNeuronFileNamesCm[neuron.get_name()]["main"] }} >("{{ perNeuronFileNamesCm[neuron.get_name()]["main"] }}"); + {% endfor %} +} // {{moduleName}}::init() diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.h.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.h.jinja2 new file mode 100644 index 000000000..9207d1585 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/@MODULE_NAME@.h.jinja2 @@ -0,0 +1,92 @@ +{# + * @MODULE_NAME@.h.jinja2 + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * +#} +{%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} +/*{% set upperModuleName = moduleName.upper() %} + * {{moduleName}}.h + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * + * {{now}} + */ + +#ifndef {{upperModuleName}}_H +#define {{upperModuleName}}_H + +#include "slimodule.h" +#include "slifunction.h" + +#include "nest.h" +#include "nest_impl.h" + + +/** +* Class defining your model. +* @note For each model, you must define one such class, with a unique name. 
+*/ +class {{moduleName}} : public SLIModule +{ +public: + // Interface functions ------------------------------------------ + + /** + * @note The constructor registers the module with the dynamic loader. + * Initialization proper is performed by the init() method. + */ + {{moduleName}}(); + + /** + * @note The destructor does not do much in modules. + */ + ~{{moduleName}}(); + + /** + * Initialize module by registering models with the network. + * @param SLIInterpreter* SLI interpreter + */ + void init( SLIInterpreter* ); + + /** + * Return the name of your model. + */ + const std::string name() const; + +public: + // Classes implementing your functions ----------------------------- + +}; + +#endif diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/CMakeLists.txt.jinja2 b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/CMakeLists.txt.jinja2 new file mode 100644 index 000000000..dc0c1f506 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/CMakeLists.txt.jinja2 @@ -0,0 +1,290 @@ +# +# CMakeLists.txt.jinja2 +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . +# +# {{moduleName}}/CMakeLists.txt +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +# This CMakeLists.txt is configured to build your external module for NEST. For +# illustrative reasons this module is called 'my' (change SHORT_NAME to your +# preferred module name). NEST requires you to extend the 'SLIModule' (see +# mymodule.h and mymodule.cpp as an example) and provide a module header +# (see MODULE_HEADER). The subsequent instructions +# +# The configuration requires a compiled and installed NEST; if `nest-config` is +# not in the PATH, please specify the absolute path with `-Dwith-nest=...`. +# +# For more informations on how to extend and use your module see: +# https://nest.github.io/nest-simulator/extension_modules + +# 1) Name your module here, i.e. 
add later with -Dexternal-modules=${moduleName}: +set( SHORT_NAME {{moduleName}} ) + +# the complete module name is here: +set( MODULE_NAME ${SHORT_NAME} ) + +# 2) Add all your sources here +set( MODULE_SOURCES + {{moduleName}}.h {{moduleName}}.cpp + {%- for neuron in neurons %} + {{perNeuronFileNamesCm[neuron.get_name()]["compartmentcurrents"]}}.cpp {{perNeuronFileNamesCm[neuron.get_name()]["compartmentcurrents"]}}.h + {{perNeuronFileNamesCm[neuron.get_name()]["main"]}}.cpp {{perNeuronFileNamesCm[neuron.get_name()]["main"]}}.h + {{perNeuronFileNamesCm[neuron.get_name()]["tree"]}}.cpp {{perNeuronFileNamesCm[neuron.get_name()]["tree"]}}.h + {% endfor -%} + + {# currently this will be empty as there are no shared files #} + {%- for cm_file_name in sharedFileNamesCmSyns.values() %} + {{cm_file_name}}.cpp {{cm_file_name}}.h + {% endfor -%} + ) + +# 3) We require a header name like this: +set( MODULE_HEADER ${MODULE_NAME}.h ) +# containing the class description of the class extending the SLIModule + +# 4) Specify your module version +set( MODULE_VERSION_MAJOR 1 ) +set( MODULE_VERSION_MINOR 0 ) +set( MODULE_VERSION "${MODULE_VERSION_MAJOR}.${MODULE_VERSION_MINOR}" ) + +# Leave the call to "project(...)" for after the compiler is determined. + +# Set the `nest-config` executable to use during configuration. +set( with-nest OFF CACHE STRING "Specify the `nest-config` executable." ) + +# If it is not set, look for a `nest-config` in the PATH. +if ( NOT with-nest ) + # try find the program ourselves + find_program( NEST_CONFIG + NAMES nest-config + ) + if ( NEST_CONFIG STREQUAL "NEST_CONFIG-NOTFOUND" ) + message( FATAL_ERROR "Cannot find the program `nest-config`. Specify via -Dwith-nest=... ." ) + endif () +else () + set( NEST_CONFIG ${with-nest} ) +endif () + +# Use `nest-config` to get the compile and installation options used with the +# NEST installation. + +# Get the compiler that was used for NEST. +execute_process( + COMMAND ${NEST_CONFIG} --compiler + RESULT_VARIABLE RES_VAR + OUTPUT_VARIABLE NEST_COMPILER + OUTPUT_STRIP_TRAILING_WHITESPACE +) + +# One check on first execution, if `nest-config` is working. +if ( NOT RES_VAR EQUAL 0 ) + message( FATAL_ERROR "Cannot run `${NEST_CONFIG}`. Please specify correct `nest-config` via -Dwith-nest=... " ) +endif () + +# Setting the compiler has to happen before the call to "project(...)" function. +set( CMAKE_CXX_COMPILER "${NEST_COMPILER}" ) + +project( ${MODULE_NAME} CXX ) + +# Get the install prefix. +execute_process( + COMMAND ${NEST_CONFIG} --prefix + RESULT_VARIABLE RES_VAR + OUTPUT_VARIABLE NEST_PREFIX + OUTPUT_STRIP_TRAILING_WHITESPACE +) + +# Get the CXXFLAGS. +execute_process( + COMMAND ${NEST_CONFIG} --cflags + RESULT_VARIABLE RES_VAR + OUTPUT_VARIABLE NEST_CXXFLAGS + OUTPUT_STRIP_TRAILING_WHITESPACE +) + +# Get the Includes. +execute_process( + COMMAND ${NEST_CONFIG} --includes + RESULT_VARIABLE RES_VAR + OUTPUT_VARIABLE NEST_INCLUDES + OUTPUT_STRIP_TRAILING_WHITESPACE +) +if ( NEST_INCLUDES ) + # make a cmake list + string( REPLACE " " ";" NEST_INCLUDES_LIST "${NEST_INCLUDES}" ) + foreach ( inc_complete ${NEST_INCLUDES_LIST} ) + # if it is actually a -Iincludedir + if ( "${inc_complete}" MATCHES "^-I.*" ) + # get the directory + string( REGEX REPLACE "^-I(.*)" "\\1" inc "${inc_complete}" ) + # and check whether it is a directory + if ( IS_DIRECTORY "${inc}" ) + include_directories( "${inc}" ) + endif () + endif () + endforeach () +endif () + +# Get, if NEST is build as a (mostly) static application. 
If yes, also only build +# static library. +execute_process( + COMMAND ${NEST_CONFIG} --static-libraries + RESULT_VARIABLE RES_VAR + OUTPUT_VARIABLE NEST_STATIC_LIB + OUTPUT_STRIP_TRAILING_WHITESPACE +) +if ( NEST_STATIC_LIB ) + set( BUILD_SHARED_LIBS OFF ) +else () + set( BUILD_SHARED_LIBS ON ) +endif () + +# Get all linked libraries. +execute_process( + COMMAND ${NEST_CONFIG} --libs + RESULT_VARIABLE RES_VAR + OUTPUT_VARIABLE NEST_LIBS + OUTPUT_STRIP_TRAILING_WHITESPACE +) + +# on OS X +set( CMAKE_MACOSX_RPATH ON ) + + +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + # Use the `NEST_PREFIX` as `CMAKE_INSTALL_PREFIX`. + set( CMAKE_INSTALL_PREFIX ${NEST_PREFIX} CACHE STRING "Install path prefix, prepended onto install directories." FORCE ) + + # Retrieve libs folder in nest + execute_process( + COMMAND ${NEST_CONFIG} --libdir + RESULT_VARIABLE RES_VAR + OUTPUT_VARIABLE NEST_LIBDIR + OUTPUT_STRIP_TRAILING_WHITESPACE) + + # Append lib/nest to the install_dir + set( CMAKE_INSTALL_LIBDIR "${NEST_LIBDIR}/nest" CACHE STRING "object code libraries (lib/nest or lib64/nest or lib//nest on Debian)" FORCE ) +else() + # Check If CMAKE_INSTALL_PREFIX is not empty string + if("${CMAKE_INSTALL_PREFIX}" STREQUAL "") + message(FATAL_ERROR "CMAKE_INSTALL_PREFIX cannot be an empty string") + endif() + + # Set lib folder to the given install_dir + set( CMAKE_INSTALL_LIBDIR ${CMAKE_INSTALL_PREFIX} CACHE STRING "object code libraries (lib/nest or lib64/nest or lib//nest on Debian)" FORCE ) +endif() + + +include( GNUInstallDirs ) + +# CPack stuff. Required for target `dist`. +set( CPACK_GENERATOR TGZ ) +set( CPACK_SOURCE_GENERATOR TGZ ) + +set( CPACK_PACKAGE_DESCRIPTION_SUMMARY "NEST Module ${MODULE_NAME}" ) +set( CPACK_PACKAGE_VENDOR "NEST Initiative (http://www.nest-initiative.org/)" ) + +set( CPACK_PACKAGE_VERSION_MAJOR ${MODULE_VERSION_MAJOR} ) +set( CPACK_PACKAGE_VERSION_MINOR ${MODULE_VERSION_MINOR} ) +set( CPACK_PACKAGE_VERSION ${MODULE_VERSION} ) + +set( CPACK_SOURCE_IGNORE_FILES + "\\\\.gitignore" + "\\\\.git/" + "\\\\.travis\\\\.yml" + + # if we have in source builds + "/build/" + "/_CPack_Packages/" + "CMakeFiles/" + "cmake_install\\\\.cmake" + "Makefile.*" + "CMakeCache\\\\.txt" + "CPackConfig\\\\.cmake" + "CPackSourceConfig\\\\.cmake" + ) +set( CPACK_SOURCE_PACKAGE_FILE_NAME ${MODULE_NAME} ) + +set( CPACK_PACKAGE_INSTALL_DIRECTORY "${MODULE_NAME} ${MODULE_VERSION}" ) +include( CPack ) + +# add make dist target +add_custom_target( dist + COMMAND ${CMAKE_MAKE_PROGRAM} package_source + # not sure about this... seems, that it will be removed before dist... + # DEPENDS doc + COMMENT "Creating a source distribution from ${MODULE_NAME}..." + ) + + +if ( BUILD_SHARED_LIBS ) + # When building shared libraries, also create a module for loading at runtime + # with the `Install` command. 
+ add_library( ${MODULE_NAME}_module MODULE ${MODULE_SOURCES} ) + set_target_properties( ${MODULE_NAME}_module + PROPERTIES + COMPILE_FLAGS "${NEST_CXXFLAGS} -DLTX_MODULE" + LINK_FLAGS "${NEST_LIBS}" + PREFIX "" + OUTPUT_NAME ${MODULE_NAME} ) + install( TARGETS ${MODULE_NAME}_module + DESTINATION ${CMAKE_INSTALL_LIBDIR} + ) +endif () + +message( "" ) +message( "-------------------------------------------------------" ) +message( "${MODULE_NAME} Configuration Summary" ) +message( "-------------------------------------------------------" ) +message( "" ) +message( "C++ compiler : ${CMAKE_CXX_COMPILER}" ) +message( "Build static libs : ${NEST_STATIC_LIB}" ) +message( "C++ compiler flags : ${CMAKE_CXX_FLAGS}" ) +message( "NEST compiler flags : ${NEST_CXXFLAGS}" ) +message( "NEST include dirs : ${NEST_INCLUDES}" ) +message( "NEST libraries flags : ${NEST_LIBS}" ) +message( "" ) +message( "-------------------------------------------------------" ) +message( "" ) +message( "You can now build and install '${MODULE_NAME}' using" ) +message( " make" ) +message( " make install" ) +message( "" ) +message( "The library file lib${MODULE_NAME}.so will be installed to" ) +message( " ${CMAKE_INSTALL_FULL_LIBDIR}" ) +message( "The module can be loaded into NEST using" ) +message( " (${MODULE_NAME}) Install (in SLI)" ) +message( " nest.Install(${MODULE_NAME}) (in PyNEST)" ) +message( "" ) diff --git a/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/__init__.py b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/__init__.py new file mode 100644 index 000000000..ec6cd5167 --- /dev/null +++ b/pynestml/codegeneration/resources_nest_compartmental/cm_neuron/setup/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# __init__.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
+ +# --------------------------------------------------------------- +# Caution: This file is required to enable Python to also include the templates +# --------------------------------------------------------------- diff --git a/pynestml/frontend/frontend_configuration.py b/pynestml/frontend/frontend_configuration.py index fdcbf002e..7ca1551ad 100644 --- a/pynestml/frontend/frontend_configuration.py +++ b/pynestml/frontend/frontend_configuration.py @@ -72,6 +72,7 @@ class FrontendConfiguration: target = None install_path = None target_path = None + target_platform = "" module_name = None store_log = False suffix = "" diff --git a/pynestml/frontend/pynestml_frontend.py b/pynestml/frontend/pynestml_frontend.py index 00cc87f6f..7139bb4c1 100644 --- a/pynestml/frontend/pynestml_frontend.py +++ b/pynestml/frontend/pynestml_frontend.py @@ -45,14 +45,15 @@ def get_known_targets(): - targets = ["NEST", "python_standalone", "autodoc", "spinnaker", "none"] + targets = ["NEST", "NEST_compartmental", "python_standalone", "autodoc", "spinnaker", "none"] targets = [s.upper() for s in targets] return targets def transformers_from_target_name(target_name: str, options: Optional[Mapping[str, Any]] = None) -> Tuple[Transformer, Dict[str, Any]]: """Static factory method that returns a list of new instances of a child class of Transformers""" - assert target_name.upper() in get_known_targets(), "Unknown target platform requested: \"" + str(target_name) + "\"" + assert target_name.upper() in get_known_targets( + ), "Unknown target platform requested: \"" + str(target_name) + "\"" # default: no transformers (empty list); options unchanged transformers: List[Transformer] = [] @@ -64,7 +65,8 @@ def transformers_from_target_name(target_name: str, options: Optional[Mapping[st # rewrite all C++ keywords # from: https://docs.microsoft.com/en-us/cpp/cpp/keywords-cpp 2022-04-23 - variable_name_rewriter = IllegalVariableNameTransformer({"forbidden_names": ["alignas", "alignof", "and", "and_eq", "asm", "auto", "bitand", "bitor", "bool", "break", "case", "catch", "char", "char8_t", "char16_t", "char32_t", "class", "compl", "concept", "const", "const_cast", "consteval", "constexpr", "constinit", "continue", "co_await", "co_return", "co_yield", "decltype", "default", "delete", "do", "double", "dynamic_cast", "else", "enum", "explicit", "export", "extern", "false", "float", "for", "friend", "goto", "if", "inline", "int", "long", "mutable", "namespace", "new", "noexcept", "not", "not_eq", "nullptr", "operator", "or", "or_eq", "private", "protected", "public", "register", "reinterpret_cast", "requires", "return", "short", "signed", "sizeof", "static", "static_assert", "static_cast", "struct", "switch", "template", "this", "thread_local", "throw", "true", "try", "typedef", "typeid", "typename", "union", "unsigned", "using", "virtual", "void", "volatile", "wchar_t", "while", "xor", "xor_eq"]}) + variable_name_rewriter = IllegalVariableNameTransformer({"forbidden_names": ["alignas", "alignof", "and", "and_eq", "asm", "auto", "bitand", "bitor", "bool", "break", "case", "catch", "char", "char8_t", "char16_t", "char32_t", "class", "compl", "concept", "const", "const_cast", "consteval", "constexpr", "constinit", "continue", "co_await", "co_return", "co_yield", "decltype", "default", "delete", "do", "double", "dynamic_cast", "else", "enum", "explicit", "export", "extern", "false", "float", "for", "friend", + "goto", "if", "inline", "int", "long", "mutable", "namespace", "new", "noexcept", "not", "not_eq", "nullptr", "operator", "or", 
"or_eq", "private", "protected", "public", "register", "reinterpret_cast", "requires", "return", "short", "signed", "sizeof", "static", "static_assert", "static_cast", "struct", "switch", "template", "this", "thread_local", "throw", "true", "try", "typedef", "typeid", "typename", "union", "unsigned", "using", "virtual", "void", "volatile", "wchar_t", "while", "xor", "xor_eq"]}) transformers.append(variable_name_rewriter) if target_name.upper() in ["SPINNAKER"]: @@ -96,7 +98,8 @@ def transformers_from_target_name(target_name: str, options: Optional[Mapping[st def code_generator_from_target_name(target_name: str, options: Optional[Mapping[str, Any]] = None) -> CodeGenerator: """Static factory method that returns a new instance of a child class of CodeGenerator""" - assert target_name.upper() in get_known_targets(), "Unknown target platform requested: \"" + str(target_name) + "\"" + assert target_name.upper() in get_known_targets( + ), "Unknown target platform requested: \"" + str(target_name) + "\"" if target_name.upper() == "NEST": from pynestml.codegeneration.nest_code_generator import NESTCodeGenerator @@ -108,9 +111,14 @@ def code_generator_from_target_name(target_name: str, options: Optional[Mapping[ if target_name.upper() == "AUTODOC": from pynestml.codegeneration.autodoc_code_generator import AutoDocCodeGenerator - assert options is None or options == {}, "\"autodoc\" code generator does not support options" + assert options is None or options == { + }, "\"autodoc\" code generator does not support options" return AutoDocCodeGenerator() + if target_name.upper() == "NEST_COMPARTMENTAL": + from pynestml.codegeneration.nest_compartmental_code_generator import NESTCompartmentalCodeGenerator + return NESTCompartmentalCodeGenerator() + if target_name.upper() == "SPINNAKER": from pynestml.codegeneration.spinnaker_code_generator import SpiNNakerCodeGenerator return SpiNNakerCodeGenerator(options) @@ -121,7 +129,8 @@ def code_generator_from_target_name(target_name: str, options: Optional[Mapping[ Logger.log_message(None, code, message, None, LoggingLevel.INFO) return CodeGenerator("", options) - assert "Unknown code generator requested: " + target_name # cannot reach here due to earlier assert -- silence + # cannot reach here due to earlier assert -- silence + assert "Unknown code generator requested: " + target_name # static checker warnings @@ -129,9 +138,10 @@ def builder_from_target_name(target_name: str, options: Optional[Mapping[str, An r"""Static factory method that returns a new instance of a child class of Builder""" from pynestml.frontend.pynestml_frontend import get_known_targets - assert target_name.upper() in get_known_targets(), "Unknown target platform requested: \"" + str(target_name) + "\"" + assert target_name.upper() in get_known_targets( + ), "Unknown target platform requested: \"" + str(target_name) + "\"" - if target_name.upper() == "NEST": + if target_name.upper() in ["NEST", "NEST_COMPARTMENTAL"]: from pynestml.codegeneration.nest_builder import NESTBuilder builder = NESTBuilder(options) remaining_options = builder.set_options(options) @@ -320,6 +330,38 @@ def generate_spinnaker_target(input_path: Union[str, Sequence[str]], target_path codegen_opts=codegen_opts) +def generate_nest_compartmental_target(input_path: Union[str, Sequence[str]], target_path: Optional[str] = None, + install_path: Optional[str] = None, logging_level="ERROR", + module_name=None, store_log: bool = False, suffix: str = "", + dev: bool = False, codegen_opts: Optional[Mapping[str, Any]] = None): + 
r"""Generate and build compartmental model code for NEST Simulator. + + Parameters + ---------- + input_path : str **or** Sequence[str] + Path to the NESTML file(s) or to folder(s) containing NESTML files to convert to NEST code. + target_path : str, optional (default: append "target" to `input_path`) + Path to the generated C++ code and install files. + install_path + Path to the directory where the generated code will be installed. + logging_level : str, optional (default: "ERROR") + Sets which level of information should be displayed duing code generation (among "ERROR", "WARNING", "INFO", or "NO"). + module_name : str, optional (default: "nestmlmodule") + The name of the generated Python module. + store_log : bool, optional (default: False) + Whether the log should be saved to file. + suffix : str, optional (default: "") + A suffix string that will be appended to the name of all generated models. + dev : bool, optional (default: False) + Enable development mode: code generation is attempted even for models that contain errors, and extra information is rendered in the generated code. + codegen_opts : Optional[Mapping[str, Any]] + A dictionary containing additional options for the target code generator. + """ + generate_target(input_path, target_platform="NEST_compartmental", target_path=target_path, + logging_level=logging_level, module_name=module_name, store_log=store_log, + suffix=suffix, install_path=install_path, dev=dev, codegen_opts=codegen_opts) + + def main() -> int: """ Entry point for the command-line application. diff --git a/pynestml/generated/PyNestMLLexer.py b/pynestml/generated/PyNestMLLexer.py index 89eedca60..0051011cc 100644 --- a/pynestml/generated/PyNestMLLexer.py +++ b/pynestml/generated/PyNestMLLexer.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLLexer.g4 by ANTLR 4.10 +# Generated from PyNestMLLexer.g4 by ANTLR 4.10.1 from antlr4 import * from io import StringIO import sys @@ -437,7 +437,7 @@ class PyNestMLLexer(PyNestMLLexerBase): def __init__(self, input=None, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.10") + self.checkVersion("4.10.1") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None diff --git a/pynestml/generated/PyNestMLParser.interp b/pynestml/generated/PyNestMLParser.interp deleted file mode 100644 index 36806c74b..000000000 --- a/pynestml/generated/PyNestMLParser.interp +++ /dev/null @@ -1,239 +0,0 @@ -token literal names: -null -null -null -'"""' -null -null -null -null -null -null -'integer' -'real' -'string' -'boolean' -'void' -'function' -'inline' -'return' -'if' -'elif' -'else' -'for' -'while' -'in' -'step' -'inf' -'and' -'or' -'not' -'recordable' -'kernel' -'neuron' -'synapse' -'state' -'parameters' -'internals' -'update' -'equations' -'input' -'output' -'continuous' -'onReceive' -'spike' -'inhibitory' -'excitatory' -'@homogeneous' -'@heterogeneous' -'@' -'...' -'(' -')' -'+' -'~' -'|' -'^' -'&' -'[' -'<-' -']' -'[[' -']]' -'<<' -'>>' -'<' -'>' -'<=' -'+=' -'-=' -'*=' -'/=' -'==' -'!=' -'<>' -'>=' -',' -'-' -'=' -'*' -'**' -'/' -'%' -'?' 
-':' -'::' -';' -'\'' -null -null -null -null -null - -token symbolic names: -null -INDENT -DEDENT -DOCSTRING_TRIPLEQUOTE -KERNEL_JOINING -WS -LINE_ESCAPE -DOCSTRING -SL_COMMENT -NEWLINE -INTEGER_KEYWORD -REAL_KEYWORD -STRING_KEYWORD -BOOLEAN_KEYWORD -VOID_KEYWORD -FUNCTION_KEYWORD -INLINE_KEYWORD -RETURN_KEYWORD -IF_KEYWORD -ELIF_KEYWORD -ELSE_KEYWORD -FOR_KEYWORD -WHILE_KEYWORD -IN_KEYWORD -STEP_KEYWORD -INF_KEYWORD -AND_KEYWORD -OR_KEYWORD -NOT_KEYWORD -RECORDABLE_KEYWORD -KERNEL_KEYWORD -NEURON_KEYWORD -SYNAPSE_KEYWORD -STATE_KEYWORD -PARAMETERS_KEYWORD -INTERNALS_KEYWORD -UPDATE_KEYWORD -EQUATIONS_KEYWORD -INPUT_KEYWORD -OUTPUT_KEYWORD -CONTINUOUS_KEYWORD -ON_RECEIVE_KEYWORD -SPIKE_KEYWORD -INHIBITORY_KEYWORD -EXCITATORY_KEYWORD -DECORATOR_HOMOGENEOUS -DECORATOR_HETEROGENEOUS -AT -ELLIPSIS -LEFT_PAREN -RIGHT_PAREN -PLUS -TILDE -PIPE -CARET -AMPERSAND -LEFT_SQUARE_BRACKET -LEFT_ANGLE_MINUS -RIGHT_SQUARE_BRACKET -LEFT_LEFT_SQUARE -RIGHT_RIGHT_SQUARE -LEFT_LEFT_ANGLE -RIGHT_RIGHT_ANGLE -LEFT_ANGLE -RIGHT_ANGLE -LEFT_ANGLE_EQUALS -PLUS_EQUALS -MINUS_EQUALS -STAR_EQUALS -FORWARD_SLASH_EQUALS -EQUALS_EQUALS -EXCLAMATION_EQUALS -LEFT_ANGLE_RIGHT_ANGLE -RIGHT_ANGLE_EQUALS -COMMA -MINUS -EQUALS -STAR -STAR_STAR -FORWARD_SLASH -PERCENT -QUESTION -COLON -DOUBLE_COLON -SEMICOLON -DIFFERENTIAL_ORDER -BOOLEAN_LITERAL -STRING_LITERAL -NAME -UNSIGNED_INTEGER -FLOAT - -rule names: -dataType -unitType -unitTypeExponent -expression -simpleExpression -unaryOperator -bitOperator -comparisonOperator -logicalOperator -variable -functionCall -inlineExpression -odeEquation -kernel -block -stmt -compoundStmt -smallStmt -assignment -declaration -declaration_newline -anyDecorator -namespaceDecoratorNamespace -namespaceDecoratorName -returnStmt -ifStmt -ifClause -elifClause -elseClause -forStmt -whileStmt -nestMLCompilationUnit -neuron -neuronBody -synapse -synapseBody -onReceiveBlock -blockWithVariables -updateBlock -equationsBlock -inputBlock -spikeInputPort -continuousInputPort -inputQualifier -outputBlock -function -parameter -constParameter - - -atn: -[4, 1, 90, 593, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 3, 0, 103, 8, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 114, 8, 1, 1, 1, 1, 1, 1, 1, 3, 1, 119, 8, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 125, 8, 1, 10, 1, 12, 1, 128, 9, 1, 1, 2, 3, 2, 131, 8, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 146, 8, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 155, 8, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 161, 8, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 5, 3, 182, 8, 3, 10, 3, 12, 3, 185, 9, 3, 1, 4, 1, 4, 1, 4, 1, 4, 3, 4, 191, 8, 4, 1, 4, 1, 4, 1, 4, 3, 4, 196, 8, 4, 1, 5, 1, 5, 1, 5, 3, 5, 201, 8, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 208, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 217, 8, 7, 
1, 8, 1, 8, 3, 8, 221, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 228, 8, 9, 1, 9, 5, 9, 231, 8, 9, 10, 9, 12, 9, 234, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 241, 8, 10, 10, 10, 12, 10, 244, 9, 10, 3, 10, 246, 8, 10, 1, 10, 1, 10, 1, 11, 3, 11, 251, 8, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 259, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 267, 8, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 280, 8, 13, 10, 13, 12, 13, 283, 9, 13, 1, 13, 3, 13, 286, 8, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 4, 14, 293, 8, 14, 11, 14, 12, 14, 294, 1, 14, 1, 14, 1, 15, 1, 15, 3, 15, 301, 8, 15, 1, 16, 1, 16, 1, 16, 3, 16, 306, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 312, 8, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 322, 8, 18, 1, 18, 1, 18, 1, 19, 3, 19, 327, 8, 19, 1, 19, 3, 19, 330, 8, 19, 1, 19, 1, 19, 1, 19, 5, 19, 335, 8, 19, 10, 19, 12, 19, 338, 9, 19, 1, 19, 1, 19, 1, 19, 3, 19, 343, 8, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 349, 8, 19, 1, 19, 5, 19, 352, 8, 19, 10, 19, 12, 19, 355, 9, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 3, 21, 367, 8, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 375, 8, 24, 1, 25, 1, 25, 5, 25, 379, 8, 25, 10, 25, 12, 25, 382, 9, 25, 1, 25, 3, 25, 385, 8, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 3, 29, 409, 8, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 4, 31, 423, 8, 31, 11, 31, 12, 31, 424, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 4, 33, 442, 8, 33, 11, 33, 12, 33, 443, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 4, 35, 462, 8, 35, 11, 35, 12, 35, 463, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 473, 8, 36, 10, 36, 12, 36, 476, 9, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 487, 8, 37, 11, 37, 12, 37, 488, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 4, 39, 504, 8, 39, 11, 39, 12, 39, 505, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 516, 8, 40, 11, 40, 12, 40, 517, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 527, 8, 41, 1, 41, 1, 41, 5, 41, 531, 8, 41, 10, 41, 12, 41, 534, 9, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 544, 8, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 553, 8, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 561, 8, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 5, 45, 572, 8, 45, 10, 45, 12, 45, 575, 9, 45, 3, 45, 577, 8, 45, 1, 45, 1, 45, 3, 45, 581, 8, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 0, 2, 2, 6, 48, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 0, 4, 2, 0, 51, 51, 75, 75, 1, 0, 89, 90, 1, 0, 33, 35, 3, 0, 25, 25, 86, 87, 89, 90, 650, 0, 102, 1, 0, 0, 0, 2, 113, 1, 0, 0, 0, 4, 130, 1, 0, 0, 0, 6, 145, 1, 0, 0, 0, 8, 195, 1, 0, 0, 0, 10, 200, 1, 0, 0, 0, 12, 207, 1, 0, 0, 0, 14, 216, 1, 0, 0, 0, 16, 220, 1, 0, 0, 0, 18, 222, 1, 0, 0, 0, 20, 235, 1, 0, 0, 0, 22, 250, 1, 0, 0, 0, 24, 262, 1, 0, 0, 0, 26, 270, 1, 0, 0, 0, 28, 289, 1, 0, 0, 0, 30, 300, 1, 0, 0, 0, 32, 
305, 1, 0, 0, 0, 34, 311, 1, 0, 0, 0, 36, 315, 1, 0, 0, 0, 38, 326, 1, 0, 0, 0, 40, 356, 1, 0, 0, 0, 42, 366, 1, 0, 0, 0, 44, 368, 1, 0, 0, 0, 46, 370, 1, 0, 0, 0, 48, 372, 1, 0, 0, 0, 50, 376, 1, 0, 0, 0, 52, 386, 1, 0, 0, 0, 54, 391, 1, 0, 0, 0, 56, 396, 1, 0, 0, 0, 58, 400, 1, 0, 0, 0, 60, 414, 1, 0, 0, 0, 62, 422, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 432, 1, 0, 0, 0, 68, 447, 1, 0, 0, 0, 70, 452, 1, 0, 0, 0, 72, 467, 1, 0, 0, 0, 74, 481, 1, 0, 0, 0, 76, 492, 1, 0, 0, 0, 78, 496, 1, 0, 0, 0, 80, 509, 1, 0, 0, 0, 82, 521, 1, 0, 0, 0, 84, 538, 1, 0, 0, 0, 86, 552, 1, 0, 0, 0, 88, 554, 1, 0, 0, 0, 90, 565, 1, 0, 0, 0, 92, 585, 1, 0, 0, 0, 94, 588, 1, 0, 0, 0, 96, 103, 5, 10, 0, 0, 97, 103, 5, 11, 0, 0, 98, 103, 5, 12, 0, 0, 99, 103, 5, 13, 0, 0, 100, 103, 5, 14, 0, 0, 101, 103, 3, 2, 1, 0, 102, 96, 1, 0, 0, 0, 102, 97, 1, 0, 0, 0, 102, 98, 1, 0, 0, 0, 102, 99, 1, 0, 0, 0, 102, 100, 1, 0, 0, 0, 102, 101, 1, 0, 0, 0, 103, 1, 1, 0, 0, 0, 104, 105, 6, 1, -1, 0, 105, 106, 5, 49, 0, 0, 106, 107, 3, 2, 1, 0, 107, 108, 5, 50, 0, 0, 108, 114, 1, 0, 0, 0, 109, 110, 5, 89, 0, 0, 110, 111, 5, 79, 0, 0, 111, 114, 3, 2, 1, 2, 112, 114, 5, 88, 0, 0, 113, 104, 1, 0, 0, 0, 113, 109, 1, 0, 0, 0, 113, 112, 1, 0, 0, 0, 114, 126, 1, 0, 0, 0, 115, 118, 10, 3, 0, 0, 116, 119, 5, 77, 0, 0, 117, 119, 5, 79, 0, 0, 118, 116, 1, 0, 0, 0, 118, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 125, 3, 2, 1, 4, 121, 122, 10, 4, 0, 0, 122, 123, 5, 78, 0, 0, 123, 125, 3, 4, 2, 0, 124, 115, 1, 0, 0, 0, 124, 121, 1, 0, 0, 0, 125, 128, 1, 0, 0, 0, 126, 124, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 3, 1, 0, 0, 0, 128, 126, 1, 0, 0, 0, 129, 131, 7, 0, 0, 0, 130, 129, 1, 0, 0, 0, 130, 131, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 133, 5, 89, 0, 0, 133, 5, 1, 0, 0, 0, 134, 135, 6, 3, -1, 0, 135, 136, 5, 49, 0, 0, 136, 137, 3, 6, 3, 0, 137, 138, 5, 50, 0, 0, 138, 146, 1, 0, 0, 0, 139, 140, 3, 10, 5, 0, 140, 141, 3, 6, 3, 9, 141, 146, 1, 0, 0, 0, 142, 143, 5, 28, 0, 0, 143, 146, 3, 6, 3, 4, 144, 146, 3, 8, 4, 0, 145, 134, 1, 0, 0, 0, 145, 139, 1, 0, 0, 0, 145, 142, 1, 0, 0, 0, 145, 144, 1, 0, 0, 0, 146, 183, 1, 0, 0, 0, 147, 148, 10, 10, 0, 0, 148, 149, 5, 78, 0, 0, 149, 182, 3, 6, 3, 10, 150, 154, 10, 8, 0, 0, 151, 155, 5, 77, 0, 0, 152, 155, 5, 79, 0, 0, 153, 155, 5, 80, 0, 0, 154, 151, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 154, 153, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 182, 3, 6, 3, 9, 157, 160, 10, 7, 0, 0, 158, 161, 5, 51, 0, 0, 159, 161, 5, 75, 0, 0, 160, 158, 1, 0, 0, 0, 160, 159, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 182, 3, 6, 3, 8, 163, 164, 10, 6, 0, 0, 164, 165, 3, 12, 6, 0, 165, 166, 3, 6, 3, 7, 166, 182, 1, 0, 0, 0, 167, 168, 10, 5, 0, 0, 168, 169, 3, 14, 7, 0, 169, 170, 3, 6, 3, 6, 170, 182, 1, 0, 0, 0, 171, 172, 10, 3, 0, 0, 172, 173, 3, 16, 8, 0, 173, 174, 3, 6, 3, 4, 174, 182, 1, 0, 0, 0, 175, 176, 10, 2, 0, 0, 176, 177, 5, 81, 0, 0, 177, 178, 3, 6, 3, 0, 178, 179, 5, 82, 0, 0, 179, 180, 3, 6, 3, 3, 180, 182, 1, 0, 0, 0, 181, 147, 1, 0, 0, 0, 181, 150, 1, 0, 0, 0, 181, 157, 1, 0, 0, 0, 181, 163, 1, 0, 0, 0, 181, 167, 1, 0, 0, 0, 181, 171, 1, 0, 0, 0, 181, 175, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 7, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 196, 3, 20, 10, 0, 187, 196, 5, 86, 0, 0, 188, 190, 7, 1, 0, 0, 189, 191, 3, 18, 9, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 196, 1, 0, 0, 0, 192, 196, 5, 87, 0, 0, 193, 196, 5, 25, 0, 0, 194, 196, 3, 18, 9, 0, 195, 186, 1, 0, 0, 0, 195, 187, 1, 0, 0, 0, 195, 188, 1, 0, 0, 0, 195, 192, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 195, 194, 1, 0, 
0, 0, 196, 9, 1, 0, 0, 0, 197, 201, 5, 51, 0, 0, 198, 201, 5, 75, 0, 0, 199, 201, 5, 52, 0, 0, 200, 197, 1, 0, 0, 0, 200, 198, 1, 0, 0, 0, 200, 199, 1, 0, 0, 0, 201, 11, 1, 0, 0, 0, 202, 208, 5, 55, 0, 0, 203, 208, 5, 54, 0, 0, 204, 208, 5, 53, 0, 0, 205, 208, 5, 61, 0, 0, 206, 208, 5, 62, 0, 0, 207, 202, 1, 0, 0, 0, 207, 203, 1, 0, 0, 0, 207, 204, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 206, 1, 0, 0, 0, 208, 13, 1, 0, 0, 0, 209, 217, 5, 63, 0, 0, 210, 217, 5, 65, 0, 0, 211, 217, 5, 70, 0, 0, 212, 217, 5, 71, 0, 0, 213, 217, 5, 72, 0, 0, 214, 217, 5, 73, 0, 0, 215, 217, 5, 64, 0, 0, 216, 209, 1, 0, 0, 0, 216, 210, 1, 0, 0, 0, 216, 211, 1, 0, 0, 0, 216, 212, 1, 0, 0, 0, 216, 213, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 215, 1, 0, 0, 0, 217, 15, 1, 0, 0, 0, 218, 221, 5, 26, 0, 0, 219, 221, 5, 27, 0, 0, 220, 218, 1, 0, 0, 0, 220, 219, 1, 0, 0, 0, 221, 17, 1, 0, 0, 0, 222, 227, 5, 88, 0, 0, 223, 224, 5, 56, 0, 0, 224, 225, 3, 6, 3, 0, 225, 226, 5, 58, 0, 0, 226, 228, 1, 0, 0, 0, 227, 223, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 232, 1, 0, 0, 0, 229, 231, 5, 85, 0, 0, 230, 229, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 19, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 235, 236, 5, 88, 0, 0, 236, 245, 5, 49, 0, 0, 237, 242, 3, 6, 3, 0, 238, 239, 5, 74, 0, 0, 239, 241, 3, 6, 3, 0, 240, 238, 1, 0, 0, 0, 241, 244, 1, 0, 0, 0, 242, 240, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 246, 1, 0, 0, 0, 244, 242, 1, 0, 0, 0, 245, 237, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 5, 50, 0, 0, 248, 21, 1, 0, 0, 0, 249, 251, 5, 29, 0, 0, 250, 249, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 253, 5, 16, 0, 0, 253, 254, 5, 88, 0, 0, 254, 255, 3, 0, 0, 0, 255, 256, 5, 76, 0, 0, 256, 258, 3, 6, 3, 0, 257, 259, 5, 84, 0, 0, 258, 257, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 261, 5, 9, 0, 0, 261, 23, 1, 0, 0, 0, 262, 263, 3, 18, 9, 0, 263, 264, 5, 76, 0, 0, 264, 266, 3, 6, 3, 0, 265, 267, 5, 84, 0, 0, 266, 265, 1, 0, 0, 0, 266, 267, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 269, 5, 9, 0, 0, 269, 25, 1, 0, 0, 0, 270, 271, 5, 30, 0, 0, 271, 272, 3, 18, 9, 0, 272, 273, 5, 76, 0, 0, 273, 281, 3, 6, 3, 0, 274, 275, 5, 4, 0, 0, 275, 276, 3, 18, 9, 0, 276, 277, 5, 76, 0, 0, 277, 278, 3, 6, 3, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 280, 283, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 285, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 284, 286, 5, 84, 0, 0, 285, 284, 1, 0, 0, 0, 285, 286, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 288, 5, 9, 0, 0, 288, 27, 1, 0, 0, 0, 289, 290, 5, 9, 0, 0, 290, 292, 5, 1, 0, 0, 291, 293, 3, 30, 15, 0, 292, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 292, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 297, 5, 2, 0, 0, 297, 29, 1, 0, 0, 0, 298, 301, 3, 34, 17, 0, 299, 301, 3, 32, 16, 0, 300, 298, 1, 0, 0, 0, 300, 299, 1, 0, 0, 0, 301, 31, 1, 0, 0, 0, 302, 306, 3, 50, 25, 0, 303, 306, 3, 58, 29, 0, 304, 306, 3, 60, 30, 0, 305, 302, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 305, 304, 1, 0, 0, 0, 306, 33, 1, 0, 0, 0, 307, 312, 3, 36, 18, 0, 308, 312, 3, 20, 10, 0, 309, 312, 3, 38, 19, 0, 310, 312, 3, 48, 24, 0, 311, 307, 1, 0, 0, 0, 311, 308, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 314, 5, 9, 0, 0, 314, 35, 1, 0, 0, 0, 315, 321, 3, 18, 9, 0, 316, 322, 5, 76, 0, 0, 317, 322, 5, 66, 0, 0, 318, 322, 5, 67, 0, 0, 319, 322, 5, 68, 0, 0, 320, 322, 5, 69, 0, 0, 321, 316, 1, 0, 0, 0, 321, 317, 1, 0, 0, 0, 321, 318, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 320, 1, 0, 0, 0, 
322, 323, 1, 0, 0, 0, 323, 324, 3, 6, 3, 0, 324, 37, 1, 0, 0, 0, 325, 327, 5, 29, 0, 0, 326, 325, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 329, 1, 0, 0, 0, 328, 330, 5, 16, 0, 0, 329, 328, 1, 0, 0, 0, 329, 330, 1, 0, 0, 0, 330, 331, 1, 0, 0, 0, 331, 336, 3, 18, 9, 0, 332, 333, 5, 74, 0, 0, 333, 335, 3, 18, 9, 0, 334, 332, 1, 0, 0, 0, 335, 338, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 337, 339, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 342, 3, 0, 0, 0, 340, 341, 5, 76, 0, 0, 341, 343, 3, 6, 3, 0, 342, 340, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 348, 1, 0, 0, 0, 344, 345, 5, 59, 0, 0, 345, 346, 3, 6, 3, 0, 346, 347, 5, 60, 0, 0, 347, 349, 1, 0, 0, 0, 348, 344, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 353, 1, 0, 0, 0, 350, 352, 3, 42, 21, 0, 351, 350, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 39, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 357, 3, 38, 19, 0, 357, 358, 5, 9, 0, 0, 358, 41, 1, 0, 0, 0, 359, 367, 5, 45, 0, 0, 360, 367, 5, 46, 0, 0, 361, 362, 5, 47, 0, 0, 362, 363, 3, 44, 22, 0, 363, 364, 5, 83, 0, 0, 364, 365, 3, 46, 23, 0, 365, 367, 1, 0, 0, 0, 366, 359, 1, 0, 0, 0, 366, 360, 1, 0, 0, 0, 366, 361, 1, 0, 0, 0, 367, 43, 1, 0, 0, 0, 368, 369, 5, 88, 0, 0, 369, 45, 1, 0, 0, 0, 370, 371, 5, 88, 0, 0, 371, 47, 1, 0, 0, 0, 372, 374, 5, 17, 0, 0, 373, 375, 3, 6, 3, 0, 374, 373, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 49, 1, 0, 0, 0, 376, 380, 3, 52, 26, 0, 377, 379, 3, 54, 27, 0, 378, 377, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 385, 3, 56, 28, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 51, 1, 0, 0, 0, 386, 387, 5, 18, 0, 0, 387, 388, 3, 6, 3, 0, 388, 389, 5, 82, 0, 0, 389, 390, 3, 28, 14, 0, 390, 53, 1, 0, 0, 0, 391, 392, 5, 19, 0, 0, 392, 393, 3, 6, 3, 0, 393, 394, 5, 82, 0, 0, 394, 395, 3, 28, 14, 0, 395, 55, 1, 0, 0, 0, 396, 397, 5, 20, 0, 0, 397, 398, 5, 82, 0, 0, 398, 399, 3, 28, 14, 0, 399, 57, 1, 0, 0, 0, 400, 401, 5, 21, 0, 0, 401, 402, 5, 88, 0, 0, 402, 403, 5, 23, 0, 0, 403, 404, 3, 6, 3, 0, 404, 405, 5, 48, 0, 0, 405, 406, 3, 6, 3, 0, 406, 408, 5, 24, 0, 0, 407, 409, 5, 75, 0, 0, 408, 407, 1, 0, 0, 0, 408, 409, 1, 0, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 7, 1, 0, 0, 411, 412, 5, 82, 0, 0, 412, 413, 3, 28, 14, 0, 413, 59, 1, 0, 0, 0, 414, 415, 5, 22, 0, 0, 415, 416, 3, 6, 3, 0, 416, 417, 5, 82, 0, 0, 417, 418, 3, 28, 14, 0, 418, 61, 1, 0, 0, 0, 419, 423, 3, 64, 32, 0, 420, 423, 3, 68, 34, 0, 421, 423, 5, 9, 0, 0, 422, 419, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 422, 421, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 422, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 5, 0, 0, 1, 427, 63, 1, 0, 0, 0, 428, 429, 5, 31, 0, 0, 429, 430, 5, 88, 0, 0, 430, 431, 3, 66, 33, 0, 431, 65, 1, 0, 0, 0, 432, 433, 5, 82, 0, 0, 433, 434, 5, 9, 0, 0, 434, 441, 5, 1, 0, 0, 435, 442, 3, 74, 37, 0, 436, 442, 3, 78, 39, 0, 437, 442, 3, 80, 40, 0, 438, 442, 3, 88, 44, 0, 439, 442, 3, 76, 38, 0, 440, 442, 3, 90, 45, 0, 441, 435, 1, 0, 0, 0, 441, 436, 1, 0, 0, 0, 441, 437, 1, 0, 0, 0, 441, 438, 1, 0, 0, 0, 441, 439, 1, 0, 0, 0, 441, 440, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 441, 1, 0, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 446, 5, 2, 0, 0, 446, 67, 1, 0, 0, 0, 447, 448, 5, 32, 0, 0, 448, 449, 5, 88, 0, 0, 449, 450, 5, 82, 0, 0, 450, 451, 3, 70, 35, 0, 451, 69, 1, 0, 0, 0, 452, 453, 5, 9, 0, 0, 453, 461, 5, 1, 0, 0, 454, 462, 3, 74, 37, 0, 455, 462, 3, 78, 39, 0, 456, 462, 3, 80, 40, 0, 457, 462, 3, 88, 44, 0, 458, 462, 3, 90, 45, 0, 459, 
462, 3, 72, 36, 0, 460, 462, 3, 76, 38, 0, 461, 454, 1, 0, 0, 0, 461, 455, 1, 0, 0, 0, 461, 456, 1, 0, 0, 0, 461, 457, 1, 0, 0, 0, 461, 458, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 461, 460, 1, 0, 0, 0, 462, 463, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 5, 2, 0, 0, 466, 71, 1, 0, 0, 0, 467, 468, 5, 41, 0, 0, 468, 469, 5, 49, 0, 0, 469, 474, 5, 88, 0, 0, 470, 471, 5, 74, 0, 0, 471, 473, 3, 94, 47, 0, 472, 470, 1, 0, 0, 0, 473, 476, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 477, 1, 0, 0, 0, 476, 474, 1, 0, 0, 0, 477, 478, 5, 50, 0, 0, 478, 479, 5, 82, 0, 0, 479, 480, 3, 28, 14, 0, 480, 73, 1, 0, 0, 0, 481, 482, 7, 2, 0, 0, 482, 483, 5, 82, 0, 0, 483, 484, 5, 9, 0, 0, 484, 486, 5, 1, 0, 0, 485, 487, 3, 40, 20, 0, 486, 485, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 486, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 5, 2, 0, 0, 491, 75, 1, 0, 0, 0, 492, 493, 5, 36, 0, 0, 493, 494, 5, 82, 0, 0, 494, 495, 3, 28, 14, 0, 495, 77, 1, 0, 0, 0, 496, 497, 5, 37, 0, 0, 497, 498, 5, 82, 0, 0, 498, 499, 5, 9, 0, 0, 499, 503, 5, 1, 0, 0, 500, 504, 3, 22, 11, 0, 501, 504, 3, 24, 12, 0, 502, 504, 3, 26, 13, 0, 503, 500, 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 503, 502, 1, 0, 0, 0, 504, 505, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 5, 2, 0, 0, 508, 79, 1, 0, 0, 0, 509, 510, 5, 38, 0, 0, 510, 511, 5, 82, 0, 0, 511, 512, 5, 9, 0, 0, 512, 515, 5, 1, 0, 0, 513, 516, 3, 82, 41, 0, 514, 516, 3, 84, 42, 0, 515, 513, 1, 0, 0, 0, 515, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 5, 2, 0, 0, 520, 81, 1, 0, 0, 0, 521, 526, 5, 88, 0, 0, 522, 523, 5, 56, 0, 0, 523, 524, 3, 6, 3, 0, 524, 525, 5, 58, 0, 0, 525, 527, 1, 0, 0, 0, 526, 522, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 532, 5, 57, 0, 0, 529, 531, 3, 86, 43, 0, 530, 529, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, 533, 535, 1, 0, 0, 0, 534, 532, 1, 0, 0, 0, 535, 536, 5, 42, 0, 0, 536, 537, 5, 9, 0, 0, 537, 83, 1, 0, 0, 0, 538, 543, 5, 88, 0, 0, 539, 540, 5, 56, 0, 0, 540, 541, 3, 6, 3, 0, 541, 542, 5, 58, 0, 0, 542, 544, 1, 0, 0, 0, 543, 539, 1, 0, 0, 0, 543, 544, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 3, 0, 0, 0, 546, 547, 5, 57, 0, 0, 547, 548, 5, 40, 0, 0, 548, 549, 5, 9, 0, 0, 549, 85, 1, 0, 0, 0, 550, 553, 5, 43, 0, 0, 551, 553, 5, 44, 0, 0, 552, 550, 1, 0, 0, 0, 552, 551, 1, 0, 0, 0, 553, 87, 1, 0, 0, 0, 554, 555, 5, 39, 0, 0, 555, 556, 5, 82, 0, 0, 556, 557, 5, 9, 0, 0, 557, 560, 5, 1, 0, 0, 558, 561, 5, 42, 0, 0, 559, 561, 5, 40, 0, 0, 560, 558, 1, 0, 0, 0, 560, 559, 1, 0, 0, 0, 561, 562, 1, 0, 0, 0, 562, 563, 5, 9, 0, 0, 563, 564, 5, 2, 0, 0, 564, 89, 1, 0, 0, 0, 565, 566, 5, 15, 0, 0, 566, 567, 5, 88, 0, 0, 567, 576, 5, 49, 0, 0, 568, 573, 3, 92, 46, 0, 569, 570, 5, 74, 0, 0, 570, 572, 3, 92, 46, 0, 571, 569, 1, 0, 0, 0, 572, 575, 1, 0, 0, 0, 573, 571, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 577, 1, 0, 0, 0, 575, 573, 1, 0, 0, 0, 576, 568, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 580, 5, 50, 0, 0, 579, 581, 3, 0, 0, 0, 580, 579, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 583, 5, 82, 0, 0, 583, 584, 3, 28, 14, 0, 584, 91, 1, 0, 0, 0, 585, 586, 5, 88, 0, 0, 586, 587, 3, 0, 0, 0, 587, 93, 1, 0, 0, 0, 588, 589, 5, 88, 0, 0, 589, 590, 5, 76, 0, 0, 590, 591, 7, 3, 0, 0, 591, 95, 1, 0, 0, 0, 62, 102, 113, 118, 124, 126, 130, 145, 154, 160, 181, 183, 190, 195, 200, 207, 216, 220, 227, 
232, 242, 245, 250, 258, 266, 281, 285, 294, 300, 305, 311, 321, 326, 329, 336, 342, 348, 353, 366, 374, 380, 384, 408, 422, 424, 441, 443, 461, 463, 474, 488, 503, 505, 515, 517, 526, 532, 543, 552, 560, 573, 576, 580] \ No newline at end of file diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 14e11d9d2..f8e3d4171 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.10 +# Generated from PyNestMLParser.g4 by ANTLR 4.10.1 # encoding: utf-8 from antlr4 import * from io import StringIO @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,90,593,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,90,605,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -29,84 +29,85 @@ def serializedATN(): 8,3,8,221,8,8,1,9,1,9,1,9,1,9,1,9,3,9,228,8,9,1,9,5,9,231,8,9,10, 9,12,9,234,9,9,1,10,1,10,1,10,1,10,1,10,5,10,241,8,10,10,10,12,10, 244,9,10,3,10,246,8,10,1,10,1,10,1,11,3,11,251,8,11,1,11,1,11,1, - 11,1,11,1,11,1,11,3,11,259,8,11,1,11,1,11,1,12,1,12,1,12,1,12,3, - 12,267,8,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1, - 13,5,13,280,8,13,10,13,12,13,283,9,13,1,13,3,13,286,8,13,1,13,1, - 13,1,14,1,14,1,14,4,14,293,8,14,11,14,12,14,294,1,14,1,14,1,15,1, - 15,3,15,301,8,15,1,16,1,16,1,16,3,16,306,8,16,1,17,1,17,1,17,1,17, - 3,17,312,8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,18,3,18,322,8, - 18,1,18,1,18,1,19,3,19,327,8,19,1,19,3,19,330,8,19,1,19,1,19,1,19, - 5,19,335,8,19,10,19,12,19,338,9,19,1,19,1,19,1,19,3,19,343,8,19, - 1,19,1,19,1,19,1,19,3,19,349,8,19,1,19,5,19,352,8,19,10,19,12,19, - 355,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21,1,21,1,21,3,21, - 367,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3,24,375,8,24,1,25,1,25,5, - 25,379,8,25,10,25,12,25,382,9,25,1,25,3,25,385,8,25,1,26,1,26,1, - 26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,29,1, - 29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,409,8,29,1,29,1,29,1,29,1, - 29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,1,31,4,31,423,8,31,11,31,12, - 31,424,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1, - 33,1,33,1,33,1,33,4,33,442,8,33,11,33,12,33,443,1,33,1,33,1,34,1, - 34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,1,35,1,35,1,35,1,35,4, - 35,462,8,35,11,35,12,35,463,1,35,1,35,1,36,1,36,1,36,1,36,1,36,5, - 36,473,8,36,10,36,12,36,476,9,36,1,36,1,36,1,36,1,36,1,37,1,37,1, - 37,1,37,1,37,4,37,487,8,37,11,37,12,37,488,1,37,1,37,1,38,1,38,1, - 38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,1,39,4,39,504,8,39,11,39,12, - 39,505,1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,40,4,40,516,8,40,11, - 40,12,40,517,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,527,8,41,1, - 41,1,41,5,41,531,8,41,10,41,12,41,534,9,41,1,41,1,41,1,41,1,42,1, - 42,1,42,1,42,1,42,3,42,544,8,42,1,42,1,42,1,42,1,42,1,42,1,43,1, - 43,3,43,553,8,43,1,44,1,44,1,44,1,44,1,44,1,44,3,44,561,8,44,1,44, - 1,44,1,44,1,45,1,45,1,45,1,45,1,45,1,45,5,45,572,8,45,10,45,12,45, - 575,9,45,3,45,577,8,45,1,45,1,45,3,45,581,8,45,1,45,1,45,1,45,1, - 46,1,46,1,46,1,47,1,47,1,47,1,47,1,47,0,2,2,6,48,0,2,4,6,8,10,12, - 14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56, - 58,60,62,64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,94,0,4,2,0, - 51,51,75,75,1,0,89,90,1,0,33,35,3,0,25,25,86,87,89,90,650,0,102, - 
1,0,0,0,2,113,1,0,0,0,4,130,1,0,0,0,6,145,1,0,0,0,8,195,1,0,0,0, - 10,200,1,0,0,0,12,207,1,0,0,0,14,216,1,0,0,0,16,220,1,0,0,0,18,222, - 1,0,0,0,20,235,1,0,0,0,22,250,1,0,0,0,24,262,1,0,0,0,26,270,1,0, - 0,0,28,289,1,0,0,0,30,300,1,0,0,0,32,305,1,0,0,0,34,311,1,0,0,0, - 36,315,1,0,0,0,38,326,1,0,0,0,40,356,1,0,0,0,42,366,1,0,0,0,44,368, - 1,0,0,0,46,370,1,0,0,0,48,372,1,0,0,0,50,376,1,0,0,0,52,386,1,0, - 0,0,54,391,1,0,0,0,56,396,1,0,0,0,58,400,1,0,0,0,60,414,1,0,0,0, - 62,422,1,0,0,0,64,428,1,0,0,0,66,432,1,0,0,0,68,447,1,0,0,0,70,452, - 1,0,0,0,72,467,1,0,0,0,74,481,1,0,0,0,76,492,1,0,0,0,78,496,1,0, - 0,0,80,509,1,0,0,0,82,521,1,0,0,0,84,538,1,0,0,0,86,552,1,0,0,0, - 88,554,1,0,0,0,90,565,1,0,0,0,92,585,1,0,0,0,94,588,1,0,0,0,96,103, - 5,10,0,0,97,103,5,11,0,0,98,103,5,12,0,0,99,103,5,13,0,0,100,103, - 5,14,0,0,101,103,3,2,1,0,102,96,1,0,0,0,102,97,1,0,0,0,102,98,1, - 0,0,0,102,99,1,0,0,0,102,100,1,0,0,0,102,101,1,0,0,0,103,1,1,0,0, - 0,104,105,6,1,-1,0,105,106,5,49,0,0,106,107,3,2,1,0,107,108,5,50, - 0,0,108,114,1,0,0,0,109,110,5,89,0,0,110,111,5,79,0,0,111,114,3, - 2,1,2,112,114,5,88,0,0,113,104,1,0,0,0,113,109,1,0,0,0,113,112,1, - 0,0,0,114,126,1,0,0,0,115,118,10,3,0,0,116,119,5,77,0,0,117,119, - 5,79,0,0,118,116,1,0,0,0,118,117,1,0,0,0,119,120,1,0,0,0,120,125, - 3,2,1,4,121,122,10,4,0,0,122,123,5,78,0,0,123,125,3,4,2,0,124,115, - 1,0,0,0,124,121,1,0,0,0,125,128,1,0,0,0,126,124,1,0,0,0,126,127, - 1,0,0,0,127,3,1,0,0,0,128,126,1,0,0,0,129,131,7,0,0,0,130,129,1, - 0,0,0,130,131,1,0,0,0,131,132,1,0,0,0,132,133,5,89,0,0,133,5,1,0, - 0,0,134,135,6,3,-1,0,135,136,5,49,0,0,136,137,3,6,3,0,137,138,5, - 50,0,0,138,146,1,0,0,0,139,140,3,10,5,0,140,141,3,6,3,9,141,146, - 1,0,0,0,142,143,5,28,0,0,143,146,3,6,3,4,144,146,3,8,4,0,145,134, - 1,0,0,0,145,139,1,0,0,0,145,142,1,0,0,0,145,144,1,0,0,0,146,183, - 1,0,0,0,147,148,10,10,0,0,148,149,5,78,0,0,149,182,3,6,3,10,150, - 154,10,8,0,0,151,155,5,77,0,0,152,155,5,79,0,0,153,155,5,80,0,0, - 154,151,1,0,0,0,154,152,1,0,0,0,154,153,1,0,0,0,155,156,1,0,0,0, - 156,182,3,6,3,9,157,160,10,7,0,0,158,161,5,51,0,0,159,161,5,75,0, - 0,160,158,1,0,0,0,160,159,1,0,0,0,161,162,1,0,0,0,162,182,3,6,3, - 8,163,164,10,6,0,0,164,165,3,12,6,0,165,166,3,6,3,7,166,182,1,0, - 0,0,167,168,10,5,0,0,168,169,3,14,7,0,169,170,3,6,3,6,170,182,1, - 0,0,0,171,172,10,3,0,0,172,173,3,16,8,0,173,174,3,6,3,4,174,182, - 1,0,0,0,175,176,10,2,0,0,176,177,5,81,0,0,177,178,3,6,3,0,178,179, - 5,82,0,0,179,180,3,6,3,3,180,182,1,0,0,0,181,147,1,0,0,0,181,150, - 1,0,0,0,181,157,1,0,0,0,181,163,1,0,0,0,181,167,1,0,0,0,181,171, - 1,0,0,0,181,175,1,0,0,0,182,185,1,0,0,0,183,181,1,0,0,0,183,184, - 1,0,0,0,184,7,1,0,0,0,185,183,1,0,0,0,186,196,3,20,10,0,187,196, - 5,86,0,0,188,190,7,1,0,0,189,191,3,18,9,0,190,189,1,0,0,0,190,191, - 1,0,0,0,191,196,1,0,0,0,192,196,5,87,0,0,193,196,5,25,0,0,194,196, - 3,18,9,0,195,186,1,0,0,0,195,187,1,0,0,0,195,188,1,0,0,0,195,192, - 1,0,0,0,195,193,1,0,0,0,195,194,1,0,0,0,196,9,1,0,0,0,197,201,5, - 51,0,0,198,201,5,75,0,0,199,201,5,52,0,0,200,197,1,0,0,0,200,198, + 11,1,11,1,11,1,11,3,11,259,8,11,1,11,5,11,262,8,11,10,11,12,11,265, + 9,11,1,11,1,11,1,12,1,12,1,12,1,12,3,12,273,8,12,1,12,5,12,276,8, + 12,10,12,12,12,279,9,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,5,13,292,8,13,10,13,12,13,295,9,13,1,13,3,13,298, + 8,13,1,13,1,13,1,14,1,14,1,14,4,14,305,8,14,11,14,12,14,306,1,14, + 1,14,1,15,1,15,3,15,313,8,15,1,16,1,16,1,16,3,16,318,8,16,1,17,1, + 17,1,17,1,17,3,17,324,8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1, + 
18,3,18,334,8,18,1,18,1,18,1,19,3,19,339,8,19,1,19,3,19,342,8,19, + 1,19,1,19,1,19,5,19,347,8,19,10,19,12,19,350,9,19,1,19,1,19,1,19, + 3,19,355,8,19,1,19,1,19,1,19,1,19,3,19,361,8,19,1,19,5,19,364,8, + 19,10,19,12,19,367,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21, + 1,21,1,21,3,21,379,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3,24,387,8, + 24,1,25,1,25,5,25,391,8,25,10,25,12,25,394,9,25,1,25,3,25,397,8, + 25,1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1, + 28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,421,8,29,1, + 29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,1,31,4,31,435, + 8,31,11,31,12,31,436,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33, + 1,33,1,33,1,33,1,33,1,33,1,33,4,33,454,8,33,11,33,12,33,455,1,33, + 1,33,1,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,1,35,1,35, + 1,35,1,35,4,35,474,8,35,11,35,12,35,475,1,35,1,35,1,36,1,36,1,36, + 1,36,1,36,5,36,485,8,36,10,36,12,36,488,9,36,1,36,1,36,1,36,1,36, + 1,37,1,37,1,37,1,37,1,37,4,37,499,8,37,11,37,12,37,500,1,37,1,37, + 1,38,1,38,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,1,39,4,39,516, + 8,39,11,39,12,39,517,1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,40,4,40, + 528,8,40,11,40,12,40,529,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41, + 539,8,41,1,41,1,41,5,41,543,8,41,10,41,12,41,546,9,41,1,41,1,41, + 1,41,1,42,1,42,1,42,1,42,1,42,3,42,556,8,42,1,42,1,42,1,42,1,42, + 1,42,1,43,1,43,3,43,565,8,43,1,44,1,44,1,44,1,44,1,44,1,44,3,44, + 573,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,45,1,45,1,45,5,45,584,8, + 45,10,45,12,45,587,9,45,3,45,589,8,45,1,45,1,45,3,45,593,8,45,1, + 45,1,45,1,45,1,46,1,46,1,46,1,47,1,47,1,47,1,47,1,47,0,2,2,6,48, + 0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44, + 46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76,78,80,82,84,86,88, + 90,92,94,0,4,2,0,51,51,75,75,1,0,89,90,1,0,33,35,3,0,25,25,86,87, + 89,90,664,0,102,1,0,0,0,2,113,1,0,0,0,4,130,1,0,0,0,6,145,1,0,0, + 0,8,195,1,0,0,0,10,200,1,0,0,0,12,207,1,0,0,0,14,216,1,0,0,0,16, + 220,1,0,0,0,18,222,1,0,0,0,20,235,1,0,0,0,22,250,1,0,0,0,24,268, + 1,0,0,0,26,282,1,0,0,0,28,301,1,0,0,0,30,312,1,0,0,0,32,317,1,0, + 0,0,34,323,1,0,0,0,36,327,1,0,0,0,38,338,1,0,0,0,40,368,1,0,0,0, + 42,378,1,0,0,0,44,380,1,0,0,0,46,382,1,0,0,0,48,384,1,0,0,0,50,388, + 1,0,0,0,52,398,1,0,0,0,54,403,1,0,0,0,56,408,1,0,0,0,58,412,1,0, + 0,0,60,426,1,0,0,0,62,434,1,0,0,0,64,440,1,0,0,0,66,444,1,0,0,0, + 68,459,1,0,0,0,70,464,1,0,0,0,72,479,1,0,0,0,74,493,1,0,0,0,76,504, + 1,0,0,0,78,508,1,0,0,0,80,521,1,0,0,0,82,533,1,0,0,0,84,550,1,0, + 0,0,86,564,1,0,0,0,88,566,1,0,0,0,90,577,1,0,0,0,92,597,1,0,0,0, + 94,600,1,0,0,0,96,103,5,10,0,0,97,103,5,11,0,0,98,103,5,12,0,0,99, + 103,5,13,0,0,100,103,5,14,0,0,101,103,3,2,1,0,102,96,1,0,0,0,102, + 97,1,0,0,0,102,98,1,0,0,0,102,99,1,0,0,0,102,100,1,0,0,0,102,101, + 1,0,0,0,103,1,1,0,0,0,104,105,6,1,-1,0,105,106,5,49,0,0,106,107, + 3,2,1,0,107,108,5,50,0,0,108,114,1,0,0,0,109,110,5,89,0,0,110,111, + 5,79,0,0,111,114,3,2,1,2,112,114,5,88,0,0,113,104,1,0,0,0,113,109, + 1,0,0,0,113,112,1,0,0,0,114,126,1,0,0,0,115,118,10,3,0,0,116,119, + 5,77,0,0,117,119,5,79,0,0,118,116,1,0,0,0,118,117,1,0,0,0,119,120, + 1,0,0,0,120,125,3,2,1,4,121,122,10,4,0,0,122,123,5,78,0,0,123,125, + 3,4,2,0,124,115,1,0,0,0,124,121,1,0,0,0,125,128,1,0,0,0,126,124, + 1,0,0,0,126,127,1,0,0,0,127,3,1,0,0,0,128,126,1,0,0,0,129,131,7, + 0,0,0,130,129,1,0,0,0,130,131,1,0,0,0,131,132,1,0,0,0,132,133,5, + 89,0,0,133,5,1,0,0,0,134,135,6,3,-1,0,135,136,5,49,0,0,136,137,3, + 6,3,0,137,138,5,50,0,0,138,146,1,0,0,0,139,140,3,10,5,0,140,141, + 
3,6,3,9,141,146,1,0,0,0,142,143,5,28,0,0,143,146,3,6,3,4,144,146, + 3,8,4,0,145,134,1,0,0,0,145,139,1,0,0,0,145,142,1,0,0,0,145,144, + 1,0,0,0,146,183,1,0,0,0,147,148,10,10,0,0,148,149,5,78,0,0,149,182, + 3,6,3,10,150,154,10,8,0,0,151,155,5,77,0,0,152,155,5,79,0,0,153, + 155,5,80,0,0,154,151,1,0,0,0,154,152,1,0,0,0,154,153,1,0,0,0,155, + 156,1,0,0,0,156,182,3,6,3,9,157,160,10,7,0,0,158,161,5,51,0,0,159, + 161,5,75,0,0,160,158,1,0,0,0,160,159,1,0,0,0,161,162,1,0,0,0,162, + 182,3,6,3,8,163,164,10,6,0,0,164,165,3,12,6,0,165,166,3,6,3,7,166, + 182,1,0,0,0,167,168,10,5,0,0,168,169,3,14,7,0,169,170,3,6,3,6,170, + 182,1,0,0,0,171,172,10,3,0,0,172,173,3,16,8,0,173,174,3,6,3,4,174, + 182,1,0,0,0,175,176,10,2,0,0,176,177,5,81,0,0,177,178,3,6,3,0,178, + 179,5,82,0,0,179,180,3,6,3,3,180,182,1,0,0,0,181,147,1,0,0,0,181, + 150,1,0,0,0,181,157,1,0,0,0,181,163,1,0,0,0,181,167,1,0,0,0,181, + 171,1,0,0,0,181,175,1,0,0,0,182,185,1,0,0,0,183,181,1,0,0,0,183, + 184,1,0,0,0,184,7,1,0,0,0,185,183,1,0,0,0,186,196,3,20,10,0,187, + 196,5,86,0,0,188,190,7,1,0,0,189,191,3,18,9,0,190,189,1,0,0,0,190, + 191,1,0,0,0,191,196,1,0,0,0,192,196,5,87,0,0,193,196,5,25,0,0,194, + 196,3,18,9,0,195,186,1,0,0,0,195,187,1,0,0,0,195,188,1,0,0,0,195, + 192,1,0,0,0,195,193,1,0,0,0,195,194,1,0,0,0,196,9,1,0,0,0,197,201, + 5,51,0,0,198,201,5,75,0,0,199,201,5,52,0,0,200,197,1,0,0,0,200,198, 1,0,0,0,200,199,1,0,0,0,201,11,1,0,0,0,202,208,5,55,0,0,203,208, 5,54,0,0,204,208,5,53,0,0,205,208,5,61,0,0,206,208,5,62,0,0,207, 202,1,0,0,0,207,203,1,0,0,0,207,204,1,0,0,0,207,205,1,0,0,0,207, @@ -126,109 +127,113 @@ def serializedATN(): 21,1,0,0,0,249,251,5,29,0,0,250,249,1,0,0,0,250,251,1,0,0,0,251, 252,1,0,0,0,252,253,5,16,0,0,253,254,5,88,0,0,254,255,3,0,0,0,255, 256,5,76,0,0,256,258,3,6,3,0,257,259,5,84,0,0,258,257,1,0,0,0,258, - 259,1,0,0,0,259,260,1,0,0,0,260,261,5,9,0,0,261,23,1,0,0,0,262,263, - 3,18,9,0,263,264,5,76,0,0,264,266,3,6,3,0,265,267,5,84,0,0,266,265, - 1,0,0,0,266,267,1,0,0,0,267,268,1,0,0,0,268,269,5,9,0,0,269,25,1, - 0,0,0,270,271,5,30,0,0,271,272,3,18,9,0,272,273,5,76,0,0,273,281, - 3,6,3,0,274,275,5,4,0,0,275,276,3,18,9,0,276,277,5,76,0,0,277,278, - 3,6,3,0,278,280,1,0,0,0,279,274,1,0,0,0,280,283,1,0,0,0,281,279, - 1,0,0,0,281,282,1,0,0,0,282,285,1,0,0,0,283,281,1,0,0,0,284,286, - 5,84,0,0,285,284,1,0,0,0,285,286,1,0,0,0,286,287,1,0,0,0,287,288, - 5,9,0,0,288,27,1,0,0,0,289,290,5,9,0,0,290,292,5,1,0,0,291,293,3, - 30,15,0,292,291,1,0,0,0,293,294,1,0,0,0,294,292,1,0,0,0,294,295, - 1,0,0,0,295,296,1,0,0,0,296,297,5,2,0,0,297,29,1,0,0,0,298,301,3, - 34,17,0,299,301,3,32,16,0,300,298,1,0,0,0,300,299,1,0,0,0,301,31, - 1,0,0,0,302,306,3,50,25,0,303,306,3,58,29,0,304,306,3,60,30,0,305, - 302,1,0,0,0,305,303,1,0,0,0,305,304,1,0,0,0,306,33,1,0,0,0,307,312, - 3,36,18,0,308,312,3,20,10,0,309,312,3,38,19,0,310,312,3,48,24,0, - 311,307,1,0,0,0,311,308,1,0,0,0,311,309,1,0,0,0,311,310,1,0,0,0, - 312,313,1,0,0,0,313,314,5,9,0,0,314,35,1,0,0,0,315,321,3,18,9,0, - 316,322,5,76,0,0,317,322,5,66,0,0,318,322,5,67,0,0,319,322,5,68, - 0,0,320,322,5,69,0,0,321,316,1,0,0,0,321,317,1,0,0,0,321,318,1,0, - 0,0,321,319,1,0,0,0,321,320,1,0,0,0,322,323,1,0,0,0,323,324,3,6, - 3,0,324,37,1,0,0,0,325,327,5,29,0,0,326,325,1,0,0,0,326,327,1,0, - 0,0,327,329,1,0,0,0,328,330,5,16,0,0,329,328,1,0,0,0,329,330,1,0, - 0,0,330,331,1,0,0,0,331,336,3,18,9,0,332,333,5,74,0,0,333,335,3, - 18,9,0,334,332,1,0,0,0,335,338,1,0,0,0,336,334,1,0,0,0,336,337,1, - 0,0,0,337,339,1,0,0,0,338,336,1,0,0,0,339,342,3,0,0,0,340,341,5, - 
76,0,0,341,343,3,6,3,0,342,340,1,0,0,0,342,343,1,0,0,0,343,348,1, - 0,0,0,344,345,5,59,0,0,345,346,3,6,3,0,346,347,5,60,0,0,347,349, - 1,0,0,0,348,344,1,0,0,0,348,349,1,0,0,0,349,353,1,0,0,0,350,352, - 3,42,21,0,351,350,1,0,0,0,352,355,1,0,0,0,353,351,1,0,0,0,353,354, - 1,0,0,0,354,39,1,0,0,0,355,353,1,0,0,0,356,357,3,38,19,0,357,358, - 5,9,0,0,358,41,1,0,0,0,359,367,5,45,0,0,360,367,5,46,0,0,361,362, - 5,47,0,0,362,363,3,44,22,0,363,364,5,83,0,0,364,365,3,46,23,0,365, - 367,1,0,0,0,366,359,1,0,0,0,366,360,1,0,0,0,366,361,1,0,0,0,367, - 43,1,0,0,0,368,369,5,88,0,0,369,45,1,0,0,0,370,371,5,88,0,0,371, - 47,1,0,0,0,372,374,5,17,0,0,373,375,3,6,3,0,374,373,1,0,0,0,374, - 375,1,0,0,0,375,49,1,0,0,0,376,380,3,52,26,0,377,379,3,54,27,0,378, - 377,1,0,0,0,379,382,1,0,0,0,380,378,1,0,0,0,380,381,1,0,0,0,381, - 384,1,0,0,0,382,380,1,0,0,0,383,385,3,56,28,0,384,383,1,0,0,0,384, - 385,1,0,0,0,385,51,1,0,0,0,386,387,5,18,0,0,387,388,3,6,3,0,388, - 389,5,82,0,0,389,390,3,28,14,0,390,53,1,0,0,0,391,392,5,19,0,0,392, - 393,3,6,3,0,393,394,5,82,0,0,394,395,3,28,14,0,395,55,1,0,0,0,396, - 397,5,20,0,0,397,398,5,82,0,0,398,399,3,28,14,0,399,57,1,0,0,0,400, - 401,5,21,0,0,401,402,5,88,0,0,402,403,5,23,0,0,403,404,3,6,3,0,404, - 405,5,48,0,0,405,406,3,6,3,0,406,408,5,24,0,0,407,409,5,75,0,0,408, - 407,1,0,0,0,408,409,1,0,0,0,409,410,1,0,0,0,410,411,7,1,0,0,411, - 412,5,82,0,0,412,413,3,28,14,0,413,59,1,0,0,0,414,415,5,22,0,0,415, - 416,3,6,3,0,416,417,5,82,0,0,417,418,3,28,14,0,418,61,1,0,0,0,419, - 423,3,64,32,0,420,423,3,68,34,0,421,423,5,9,0,0,422,419,1,0,0,0, - 422,420,1,0,0,0,422,421,1,0,0,0,423,424,1,0,0,0,424,422,1,0,0,0, - 424,425,1,0,0,0,425,426,1,0,0,0,426,427,5,0,0,1,427,63,1,0,0,0,428, - 429,5,31,0,0,429,430,5,88,0,0,430,431,3,66,33,0,431,65,1,0,0,0,432, - 433,5,82,0,0,433,434,5,9,0,0,434,441,5,1,0,0,435,442,3,74,37,0,436, - 442,3,78,39,0,437,442,3,80,40,0,438,442,3,88,44,0,439,442,3,76,38, - 0,440,442,3,90,45,0,441,435,1,0,0,0,441,436,1,0,0,0,441,437,1,0, - 0,0,441,438,1,0,0,0,441,439,1,0,0,0,441,440,1,0,0,0,442,443,1,0, - 0,0,443,441,1,0,0,0,443,444,1,0,0,0,444,445,1,0,0,0,445,446,5,2, - 0,0,446,67,1,0,0,0,447,448,5,32,0,0,448,449,5,88,0,0,449,450,5,82, - 0,0,450,451,3,70,35,0,451,69,1,0,0,0,452,453,5,9,0,0,453,461,5,1, - 0,0,454,462,3,74,37,0,455,462,3,78,39,0,456,462,3,80,40,0,457,462, - 3,88,44,0,458,462,3,90,45,0,459,462,3,72,36,0,460,462,3,76,38,0, - 461,454,1,0,0,0,461,455,1,0,0,0,461,456,1,0,0,0,461,457,1,0,0,0, - 461,458,1,0,0,0,461,459,1,0,0,0,461,460,1,0,0,0,462,463,1,0,0,0, - 463,461,1,0,0,0,463,464,1,0,0,0,464,465,1,0,0,0,465,466,5,2,0,0, - 466,71,1,0,0,0,467,468,5,41,0,0,468,469,5,49,0,0,469,474,5,88,0, - 0,470,471,5,74,0,0,471,473,3,94,47,0,472,470,1,0,0,0,473,476,1,0, - 0,0,474,472,1,0,0,0,474,475,1,0,0,0,475,477,1,0,0,0,476,474,1,0, - 0,0,477,478,5,50,0,0,478,479,5,82,0,0,479,480,3,28,14,0,480,73,1, - 0,0,0,481,482,7,2,0,0,482,483,5,82,0,0,483,484,5,9,0,0,484,486,5, - 1,0,0,485,487,3,40,20,0,486,485,1,0,0,0,487,488,1,0,0,0,488,486, - 1,0,0,0,488,489,1,0,0,0,489,490,1,0,0,0,490,491,5,2,0,0,491,75,1, - 0,0,0,492,493,5,36,0,0,493,494,5,82,0,0,494,495,3,28,14,0,495,77, - 1,0,0,0,496,497,5,37,0,0,497,498,5,82,0,0,498,499,5,9,0,0,499,503, - 5,1,0,0,500,504,3,22,11,0,501,504,3,24,12,0,502,504,3,26,13,0,503, - 500,1,0,0,0,503,501,1,0,0,0,503,502,1,0,0,0,504,505,1,0,0,0,505, - 503,1,0,0,0,505,506,1,0,0,0,506,507,1,0,0,0,507,508,5,2,0,0,508, - 79,1,0,0,0,509,510,5,38,0,0,510,511,5,82,0,0,511,512,5,9,0,0,512, - 515,5,1,0,0,513,516,3,82,41,0,514,516,3,84,42,0,515,513,1,0,0,0, - 
515,514,1,0,0,0,516,517,1,0,0,0,517,515,1,0,0,0,517,518,1,0,0,0, - 518,519,1,0,0,0,519,520,5,2,0,0,520,81,1,0,0,0,521,526,5,88,0,0, - 522,523,5,56,0,0,523,524,3,6,3,0,524,525,5,58,0,0,525,527,1,0,0, - 0,526,522,1,0,0,0,526,527,1,0,0,0,527,528,1,0,0,0,528,532,5,57,0, - 0,529,531,3,86,43,0,530,529,1,0,0,0,531,534,1,0,0,0,532,530,1,0, - 0,0,532,533,1,0,0,0,533,535,1,0,0,0,534,532,1,0,0,0,535,536,5,42, - 0,0,536,537,5,9,0,0,537,83,1,0,0,0,538,543,5,88,0,0,539,540,5,56, - 0,0,540,541,3,6,3,0,541,542,5,58,0,0,542,544,1,0,0,0,543,539,1,0, - 0,0,543,544,1,0,0,0,544,545,1,0,0,0,545,546,3,0,0,0,546,547,5,57, - 0,0,547,548,5,40,0,0,548,549,5,9,0,0,549,85,1,0,0,0,550,553,5,43, - 0,0,551,553,5,44,0,0,552,550,1,0,0,0,552,551,1,0,0,0,553,87,1,0, - 0,0,554,555,5,39,0,0,555,556,5,82,0,0,556,557,5,9,0,0,557,560,5, - 1,0,0,558,561,5,42,0,0,559,561,5,40,0,0,560,558,1,0,0,0,560,559, - 1,0,0,0,561,562,1,0,0,0,562,563,5,9,0,0,563,564,5,2,0,0,564,89,1, - 0,0,0,565,566,5,15,0,0,566,567,5,88,0,0,567,576,5,49,0,0,568,573, - 3,92,46,0,569,570,5,74,0,0,570,572,3,92,46,0,571,569,1,0,0,0,572, - 575,1,0,0,0,573,571,1,0,0,0,573,574,1,0,0,0,574,577,1,0,0,0,575, - 573,1,0,0,0,576,568,1,0,0,0,576,577,1,0,0,0,577,578,1,0,0,0,578, - 580,5,50,0,0,579,581,3,0,0,0,580,579,1,0,0,0,580,581,1,0,0,0,581, - 582,1,0,0,0,582,583,5,82,0,0,583,584,3,28,14,0,584,91,1,0,0,0,585, - 586,5,88,0,0,586,587,3,0,0,0,587,93,1,0,0,0,588,589,5,88,0,0,589, - 590,5,76,0,0,590,591,7,3,0,0,591,95,1,0,0,0,62,102,113,118,124,126, - 130,145,154,160,181,183,190,195,200,207,216,220,227,232,242,245, - 250,258,266,281,285,294,300,305,311,321,326,329,336,342,348,353, - 366,374,380,384,408,422,424,441,443,461,463,474,488,503,505,515, - 517,526,532,543,552,560,573,576,580 + 259,1,0,0,0,259,263,1,0,0,0,260,262,3,42,21,0,261,260,1,0,0,0,262, + 265,1,0,0,0,263,261,1,0,0,0,263,264,1,0,0,0,264,266,1,0,0,0,265, + 263,1,0,0,0,266,267,5,9,0,0,267,23,1,0,0,0,268,269,3,18,9,0,269, + 270,5,76,0,0,270,272,3,6,3,0,271,273,5,84,0,0,272,271,1,0,0,0,272, + 273,1,0,0,0,273,277,1,0,0,0,274,276,3,42,21,0,275,274,1,0,0,0,276, + 279,1,0,0,0,277,275,1,0,0,0,277,278,1,0,0,0,278,280,1,0,0,0,279, + 277,1,0,0,0,280,281,5,9,0,0,281,25,1,0,0,0,282,283,5,30,0,0,283, + 284,3,18,9,0,284,285,5,76,0,0,285,293,3,6,3,0,286,287,5,4,0,0,287, + 288,3,18,9,0,288,289,5,76,0,0,289,290,3,6,3,0,290,292,1,0,0,0,291, + 286,1,0,0,0,292,295,1,0,0,0,293,291,1,0,0,0,293,294,1,0,0,0,294, + 297,1,0,0,0,295,293,1,0,0,0,296,298,5,84,0,0,297,296,1,0,0,0,297, + 298,1,0,0,0,298,299,1,0,0,0,299,300,5,9,0,0,300,27,1,0,0,0,301,302, + 5,9,0,0,302,304,5,1,0,0,303,305,3,30,15,0,304,303,1,0,0,0,305,306, + 1,0,0,0,306,304,1,0,0,0,306,307,1,0,0,0,307,308,1,0,0,0,308,309, + 5,2,0,0,309,29,1,0,0,0,310,313,3,34,17,0,311,313,3,32,16,0,312,310, + 1,0,0,0,312,311,1,0,0,0,313,31,1,0,0,0,314,318,3,50,25,0,315,318, + 3,58,29,0,316,318,3,60,30,0,317,314,1,0,0,0,317,315,1,0,0,0,317, + 316,1,0,0,0,318,33,1,0,0,0,319,324,3,36,18,0,320,324,3,20,10,0,321, + 324,3,38,19,0,322,324,3,48,24,0,323,319,1,0,0,0,323,320,1,0,0,0, + 323,321,1,0,0,0,323,322,1,0,0,0,324,325,1,0,0,0,325,326,5,9,0,0, + 326,35,1,0,0,0,327,333,3,18,9,0,328,334,5,76,0,0,329,334,5,66,0, + 0,330,334,5,67,0,0,331,334,5,68,0,0,332,334,5,69,0,0,333,328,1,0, + 0,0,333,329,1,0,0,0,333,330,1,0,0,0,333,331,1,0,0,0,333,332,1,0, + 0,0,334,335,1,0,0,0,335,336,3,6,3,0,336,37,1,0,0,0,337,339,5,29, + 0,0,338,337,1,0,0,0,338,339,1,0,0,0,339,341,1,0,0,0,340,342,5,16, + 0,0,341,340,1,0,0,0,341,342,1,0,0,0,342,343,1,0,0,0,343,348,3,18, + 
9,0,344,345,5,74,0,0,345,347,3,18,9,0,346,344,1,0,0,0,347,350,1, + 0,0,0,348,346,1,0,0,0,348,349,1,0,0,0,349,351,1,0,0,0,350,348,1, + 0,0,0,351,354,3,0,0,0,352,353,5,76,0,0,353,355,3,6,3,0,354,352,1, + 0,0,0,354,355,1,0,0,0,355,360,1,0,0,0,356,357,5,59,0,0,357,358,3, + 6,3,0,358,359,5,60,0,0,359,361,1,0,0,0,360,356,1,0,0,0,360,361,1, + 0,0,0,361,365,1,0,0,0,362,364,3,42,21,0,363,362,1,0,0,0,364,367, + 1,0,0,0,365,363,1,0,0,0,365,366,1,0,0,0,366,39,1,0,0,0,367,365,1, + 0,0,0,368,369,3,38,19,0,369,370,5,9,0,0,370,41,1,0,0,0,371,379,5, + 45,0,0,372,379,5,46,0,0,373,374,5,47,0,0,374,375,3,44,22,0,375,376, + 5,83,0,0,376,377,3,46,23,0,377,379,1,0,0,0,378,371,1,0,0,0,378,372, + 1,0,0,0,378,373,1,0,0,0,379,43,1,0,0,0,380,381,5,88,0,0,381,45,1, + 0,0,0,382,383,5,88,0,0,383,47,1,0,0,0,384,386,5,17,0,0,385,387,3, + 6,3,0,386,385,1,0,0,0,386,387,1,0,0,0,387,49,1,0,0,0,388,392,3,52, + 26,0,389,391,3,54,27,0,390,389,1,0,0,0,391,394,1,0,0,0,392,390,1, + 0,0,0,392,393,1,0,0,0,393,396,1,0,0,0,394,392,1,0,0,0,395,397,3, + 56,28,0,396,395,1,0,0,0,396,397,1,0,0,0,397,51,1,0,0,0,398,399,5, + 18,0,0,399,400,3,6,3,0,400,401,5,82,0,0,401,402,3,28,14,0,402,53, + 1,0,0,0,403,404,5,19,0,0,404,405,3,6,3,0,405,406,5,82,0,0,406,407, + 3,28,14,0,407,55,1,0,0,0,408,409,5,20,0,0,409,410,5,82,0,0,410,411, + 3,28,14,0,411,57,1,0,0,0,412,413,5,21,0,0,413,414,5,88,0,0,414,415, + 5,23,0,0,415,416,3,6,3,0,416,417,5,48,0,0,417,418,3,6,3,0,418,420, + 5,24,0,0,419,421,5,75,0,0,420,419,1,0,0,0,420,421,1,0,0,0,421,422, + 1,0,0,0,422,423,7,1,0,0,423,424,5,82,0,0,424,425,3,28,14,0,425,59, + 1,0,0,0,426,427,5,22,0,0,427,428,3,6,3,0,428,429,5,82,0,0,429,430, + 3,28,14,0,430,61,1,0,0,0,431,435,3,64,32,0,432,435,3,68,34,0,433, + 435,5,9,0,0,434,431,1,0,0,0,434,432,1,0,0,0,434,433,1,0,0,0,435, + 436,1,0,0,0,436,434,1,0,0,0,436,437,1,0,0,0,437,438,1,0,0,0,438, + 439,5,0,0,1,439,63,1,0,0,0,440,441,5,31,0,0,441,442,5,88,0,0,442, + 443,3,66,33,0,443,65,1,0,0,0,444,445,5,82,0,0,445,446,5,9,0,0,446, + 453,5,1,0,0,447,454,3,74,37,0,448,454,3,78,39,0,449,454,3,80,40, + 0,450,454,3,88,44,0,451,454,3,76,38,0,452,454,3,90,45,0,453,447, + 1,0,0,0,453,448,1,0,0,0,453,449,1,0,0,0,453,450,1,0,0,0,453,451, + 1,0,0,0,453,452,1,0,0,0,454,455,1,0,0,0,455,453,1,0,0,0,455,456, + 1,0,0,0,456,457,1,0,0,0,457,458,5,2,0,0,458,67,1,0,0,0,459,460,5, + 32,0,0,460,461,5,88,0,0,461,462,5,82,0,0,462,463,3,70,35,0,463,69, + 1,0,0,0,464,465,5,9,0,0,465,473,5,1,0,0,466,474,3,74,37,0,467,474, + 3,78,39,0,468,474,3,80,40,0,469,474,3,88,44,0,470,474,3,90,45,0, + 471,474,3,72,36,0,472,474,3,76,38,0,473,466,1,0,0,0,473,467,1,0, + 0,0,473,468,1,0,0,0,473,469,1,0,0,0,473,470,1,0,0,0,473,471,1,0, + 0,0,473,472,1,0,0,0,474,475,1,0,0,0,475,473,1,0,0,0,475,476,1,0, + 0,0,476,477,1,0,0,0,477,478,5,2,0,0,478,71,1,0,0,0,479,480,5,41, + 0,0,480,481,5,49,0,0,481,486,5,88,0,0,482,483,5,74,0,0,483,485,3, + 94,47,0,484,482,1,0,0,0,485,488,1,0,0,0,486,484,1,0,0,0,486,487, + 1,0,0,0,487,489,1,0,0,0,488,486,1,0,0,0,489,490,5,50,0,0,490,491, + 5,82,0,0,491,492,3,28,14,0,492,73,1,0,0,0,493,494,7,2,0,0,494,495, + 5,82,0,0,495,496,5,9,0,0,496,498,5,1,0,0,497,499,3,40,20,0,498,497, + 1,0,0,0,499,500,1,0,0,0,500,498,1,0,0,0,500,501,1,0,0,0,501,502, + 1,0,0,0,502,503,5,2,0,0,503,75,1,0,0,0,504,505,5,36,0,0,505,506, + 5,82,0,0,506,507,3,28,14,0,507,77,1,0,0,0,508,509,5,37,0,0,509,510, + 5,82,0,0,510,511,5,9,0,0,511,515,5,1,0,0,512,516,3,22,11,0,513,516, + 3,24,12,0,514,516,3,26,13,0,515,512,1,0,0,0,515,513,1,0,0,0,515, + 514,1,0,0,0,516,517,1,0,0,0,517,515,1,0,0,0,517,518,1,0,0,0,518, + 
519,1,0,0,0,519,520,5,2,0,0,520,79,1,0,0,0,521,522,5,38,0,0,522, + 523,5,82,0,0,523,524,5,9,0,0,524,527,5,1,0,0,525,528,3,82,41,0,526, + 528,3,84,42,0,527,525,1,0,0,0,527,526,1,0,0,0,528,529,1,0,0,0,529, + 527,1,0,0,0,529,530,1,0,0,0,530,531,1,0,0,0,531,532,5,2,0,0,532, + 81,1,0,0,0,533,538,5,88,0,0,534,535,5,56,0,0,535,536,3,6,3,0,536, + 537,5,58,0,0,537,539,1,0,0,0,538,534,1,0,0,0,538,539,1,0,0,0,539, + 540,1,0,0,0,540,544,5,57,0,0,541,543,3,86,43,0,542,541,1,0,0,0,543, + 546,1,0,0,0,544,542,1,0,0,0,544,545,1,0,0,0,545,547,1,0,0,0,546, + 544,1,0,0,0,547,548,5,42,0,0,548,549,5,9,0,0,549,83,1,0,0,0,550, + 555,5,88,0,0,551,552,5,56,0,0,552,553,3,6,3,0,553,554,5,58,0,0,554, + 556,1,0,0,0,555,551,1,0,0,0,555,556,1,0,0,0,556,557,1,0,0,0,557, + 558,3,0,0,0,558,559,5,57,0,0,559,560,5,40,0,0,560,561,5,9,0,0,561, + 85,1,0,0,0,562,565,5,43,0,0,563,565,5,44,0,0,564,562,1,0,0,0,564, + 563,1,0,0,0,565,87,1,0,0,0,566,567,5,39,0,0,567,568,5,82,0,0,568, + 569,5,9,0,0,569,572,5,1,0,0,570,573,5,42,0,0,571,573,5,40,0,0,572, + 570,1,0,0,0,572,571,1,0,0,0,573,574,1,0,0,0,574,575,5,9,0,0,575, + 576,5,2,0,0,576,89,1,0,0,0,577,578,5,15,0,0,578,579,5,88,0,0,579, + 588,5,49,0,0,580,585,3,92,46,0,581,582,5,74,0,0,582,584,3,92,46, + 0,583,581,1,0,0,0,584,587,1,0,0,0,585,583,1,0,0,0,585,586,1,0,0, + 0,586,589,1,0,0,0,587,585,1,0,0,0,588,580,1,0,0,0,588,589,1,0,0, + 0,589,590,1,0,0,0,590,592,5,50,0,0,591,593,3,0,0,0,592,591,1,0,0, + 0,592,593,1,0,0,0,593,594,1,0,0,0,594,595,5,82,0,0,595,596,3,28, + 14,0,596,91,1,0,0,0,597,598,5,88,0,0,598,599,3,0,0,0,599,93,1,0, + 0,0,600,601,5,88,0,0,601,602,5,76,0,0,602,603,7,3,0,0,603,95,1,0, + 0,0,64,102,113,118,124,126,130,145,154,160,181,183,190,195,200,207, + 216,220,227,232,242,245,250,258,263,272,277,293,297,306,312,317, + 323,333,338,341,348,354,360,365,378,386,392,396,420,434,436,453, + 455,473,475,486,500,515,517,527,529,538,544,555,564,572,585,588, + 592 ] class PyNestMLParser ( Parser ): @@ -442,7 +447,7 @@ class PyNestMLParser ( Parser ): def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.10") + self.checkVersion("4.10.1") self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) self._predicates = None @@ -1633,6 +1638,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.parser = parser self.recordable = None # Token self.variableName = None # Token + self.decorator = None # AnyDecoratorContext def INLINE_KEYWORD(self): return self.getToken(PyNestMLParser.INLINE_KEYWORD, 0) @@ -1660,6 +1666,13 @@ def SEMICOLON(self): def RECORDABLE_KEYWORD(self): return self.getToken(PyNestMLParser.RECORDABLE_KEYWORD, 0) + def anyDecorator(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PyNestMLParser.AnyDecoratorContext) + else: + return self.getTypedRuleContext(PyNestMLParser.AnyDecoratorContext,i) + + def getRuleIndex(self): return PyNestMLParser.RULE_inlineExpression @@ -1705,7 +1718,17 @@ def inlineExpression(self): self.match(PyNestMLParser.SEMICOLON) - self.state = 260 + self.state = 263 + self._errHandler.sync(self) + _la = self._input.LA(1) + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + self.state = 260 + localctx.decorator = self.anyDecorator() + self.state = 265 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 266 self.match(PyNestMLParser.NEWLINE) except 
RecognitionException as re: localctx.exception = re @@ -1724,6 +1747,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.parser = parser self.lhs = None # VariableContext self.rhs = None # ExpressionContext + self.decorator = None # AnyDecoratorContext def EQUALS(self): return self.getToken(PyNestMLParser.EQUALS, 0) @@ -1742,6 +1766,13 @@ def expression(self): def SEMICOLON(self): return self.getToken(PyNestMLParser.SEMICOLON, 0) + def anyDecorator(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PyNestMLParser.AnyDecoratorContext) + else: + return self.getTypedRuleContext(PyNestMLParser.AnyDecoratorContext,i) + + def getRuleIndex(self): return PyNestMLParser.RULE_odeEquation @@ -1761,21 +1792,31 @@ def odeEquation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 262 + self.state = 268 localctx.lhs = self.variable() - self.state = 263 + self.state = 269 self.match(PyNestMLParser.EQUALS) - self.state = 264 + self.state = 270 localctx.rhs = self.expression(0) - self.state = 266 + self.state = 272 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.SEMICOLON: - self.state = 265 + self.state = 271 self.match(PyNestMLParser.SEMICOLON) - self.state = 268 + self.state = 277 + self._errHandler.sync(self) + _la = self._input.LA(1) + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + self.state = 274 + localctx.decorator = self.anyDecorator() + self.state = 279 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 280 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1847,39 +1888,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 270 + self.state = 282 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 271 + self.state = 283 self.variable() - self.state = 272 + self.state = 284 self.match(PyNestMLParser.EQUALS) - self.state = 273 + self.state = 285 self.expression(0) - self.state = 281 + self.state = 293 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PyNestMLParser.KERNEL_JOINING: - self.state = 274 + self.state = 286 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 275 + self.state = 287 self.variable() - self.state = 276 + self.state = 288 self.match(PyNestMLParser.EQUALS) - self.state = 277 + self.state = 289 self.expression(0) - self.state = 283 + self.state = 295 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 285 + self.state = 297 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.SEMICOLON: - self.state = 284 + self.state = 296 self.match(PyNestMLParser.SEMICOLON) - self.state = 287 + self.state = 299 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1932,23 +1973,23 @@ def block(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 289 + self.state = 301 self.match(PyNestMLParser.NEWLINE) - self.state = 290 + self.state = 302 self.match(PyNestMLParser.INDENT) - self.state = 292 + self.state = 304 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 291 + self.state = 303 self.stmt() - self.state = 294 + self.state = 306 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INLINE_KEYWORD) | 
(1 << PyNestMLParser.RETURN_KEYWORD) | (1 << PyNestMLParser.IF_KEYWORD) | (1 << PyNestMLParser.FOR_KEYWORD) | (1 << PyNestMLParser.WHILE_KEYWORD) | (1 << PyNestMLParser.RECORDABLE_KEYWORD))) != 0) or _la==PyNestMLParser.NAME): break - self.state = 296 + self.state = 308 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -1991,17 +2032,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_stmt) try: - self.state = 300 + self.state = 312 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.INLINE_KEYWORD, PyNestMLParser.RETURN_KEYWORD, PyNestMLParser.RECORDABLE_KEYWORD, PyNestMLParser.NAME]: self.enterOuterAlt(localctx, 1) - self.state = 298 + self.state = 310 self.smallStmt() pass elif token in [PyNestMLParser.IF_KEYWORD, PyNestMLParser.FOR_KEYWORD, PyNestMLParser.WHILE_KEYWORD]: self.enterOuterAlt(localctx, 2) - self.state = 299 + self.state = 311 self.compoundStmt() pass else: @@ -2052,22 +2093,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_compoundStmt) try: - self.state = 305 + self.state = 317 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.IF_KEYWORD]: self.enterOuterAlt(localctx, 1) - self.state = 302 + self.state = 314 self.ifStmt() pass elif token in [PyNestMLParser.FOR_KEYWORD]: self.enterOuterAlt(localctx, 2) - self.state = 303 + self.state = 315 self.forStmt() pass elif token in [PyNestMLParser.WHILE_KEYWORD]: self.enterOuterAlt(localctx, 3) - self.state = 304 + self.state = 316 self.whileStmt() pass else: @@ -2126,31 +2167,31 @@ def smallStmt(self): self.enterRule(localctx, 34, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 311 + self.state = 323 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,29,self._ctx) + la_ = self._interp.adaptivePredict(self._input,31,self._ctx) if la_ == 1: - self.state = 307 + self.state = 319 self.assignment() pass elif la_ == 2: - self.state = 308 + self.state = 320 self.functionCall() pass elif la_ == 3: - self.state = 309 + self.state = 321 self.declaration() pass elif la_ == 4: - self.state = 310 + self.state = 322 self.returnStmt() pass - self.state = 313 + self.state = 325 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2215,35 +2256,35 @@ def assignment(self): self.enterRule(localctx, 36, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 315 + self.state = 327 localctx.lhs_variable = self.variable() - self.state = 321 + self.state = 333 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.EQUALS]: - self.state = 316 + self.state = 328 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass elif token in [PyNestMLParser.PLUS_EQUALS]: - self.state = 317 + self.state = 329 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass elif token in [PyNestMLParser.MINUS_EQUALS]: - self.state = 318 + self.state = 330 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass elif token in [PyNestMLParser.STAR_EQUALS]: - self.state = 319 + self.state = 331 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass elif token in [PyNestMLParser.FORWARD_SLASH_EQUALS]: - self.state = 320 + self.state = 332 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass 
else: raise NoViableAltException(self) - self.state = 323 + self.state = 335 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2331,67 +2372,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 326 + self.state = 338 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.RECORDABLE_KEYWORD: - self.state = 325 + self.state = 337 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 329 + self.state = 341 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.INLINE_KEYWORD: - self.state = 328 + self.state = 340 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 331 + self.state = 343 self.variable() - self.state = 336 + self.state = 348 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PyNestMLParser.COMMA: - self.state = 332 + self.state = 344 self.match(PyNestMLParser.COMMA) - self.state = 333 + self.state = 345 self.variable() - self.state = 338 + self.state = 350 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 339 + self.state = 351 self.dataType() - self.state = 342 + self.state = 354 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.EQUALS: - self.state = 340 + self.state = 352 self.match(PyNestMLParser.EQUALS) - self.state = 341 + self.state = 353 localctx.rhs = self.expression(0) - self.state = 348 + self.state = 360 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.LEFT_LEFT_SQUARE: - self.state = 344 + self.state = 356 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 345 + self.state = 357 localctx.invariant = self.expression(0) - self.state = 346 + self.state = 358 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 353 + self.state = 365 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): - self.state = 350 + self.state = 362 localctx.decorator = self.anyDecorator() - self.state = 355 + self.state = 367 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2436,9 +2477,9 @@ def declaration_newline(self): self.enterRule(localctx, 40, self.RULE_declaration_newline) try: self.enterOuterAlt(localctx, 1) - self.state = 356 + self.state = 368 self.declaration() - self.state = 357 + self.state = 369 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2493,28 +2534,28 @@ def anyDecorator(self): localctx = PyNestMLParser.AnyDecoratorContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_anyDecorator) try: - self.state = 366 + self.state = 378 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.DECORATOR_HOMOGENEOUS]: self.enterOuterAlt(localctx, 1) - self.state = 359 + self.state = 371 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass elif token in [PyNestMLParser.DECORATOR_HETEROGENEOUS]: self.enterOuterAlt(localctx, 2) - self.state = 360 + self.state = 372 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass elif token in [PyNestMLParser.AT]: self.enterOuterAlt(localctx, 3) - self.state = 361 + self.state = 373 self.match(PyNestMLParser.AT) - self.state = 362 + self.state = 374 self.namespaceDecoratorNamespace() - self.state = 363 + self.state = 375 self.match(PyNestMLParser.DOUBLE_COLON) - self.state = 364 + self.state = 
376 self.namespaceDecoratorName() pass else: @@ -2558,7 +2599,7 @@ def namespaceDecoratorNamespace(self): self.enterRule(localctx, 44, self.RULE_namespaceDecoratorNamespace) try: self.enterOuterAlt(localctx, 1) - self.state = 368 + self.state = 380 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2598,7 +2639,7 @@ def namespaceDecoratorName(self): self.enterRule(localctx, 46, self.RULE_namespaceDecoratorName) try: self.enterOuterAlt(localctx, 1) - self.state = 370 + self.state = 382 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2642,13 +2683,13 @@ def returnStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 372 + self.state = 384 self.match(PyNestMLParser.RETURN_KEYWORD) - self.state = 374 + self.state = 386 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INF_KEYWORD) | (1 << PyNestMLParser.NOT_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN) | (1 << PyNestMLParser.PLUS) | (1 << PyNestMLParser.TILDE))) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (PyNestMLParser.MINUS - 75)) | (1 << (PyNestMLParser.BOOLEAN_LITERAL - 75)) | (1 << (PyNestMLParser.STRING_LITERAL - 75)) | (1 << (PyNestMLParser.NAME - 75)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 75)) | (1 << (PyNestMLParser.FLOAT - 75)))) != 0): - self.state = 373 + self.state = 385 self.expression(0) @@ -2702,23 +2743,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 376 + self.state = 388 self.ifClause() - self.state = 380 + self.state = 392 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PyNestMLParser.ELIF_KEYWORD: - self.state = 377 + self.state = 389 self.elifClause() - self.state = 382 + self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 384 + self.state = 396 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.ELSE_KEYWORD: - self.state = 383 + self.state = 395 self.elseClause() @@ -2770,13 +2811,13 @@ def ifClause(self): self.enterRule(localctx, 52, self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 386 + self.state = 398 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 387 + self.state = 399 self.expression(0) - self.state = 388 + self.state = 400 self.match(PyNestMLParser.COLON) - self.state = 389 + self.state = 401 self.block() except RecognitionException as re: localctx.exception = re @@ -2826,13 +2867,13 @@ def elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 391 + self.state = 403 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 392 + self.state = 404 self.expression(0) - self.state = 393 + self.state = 405 self.match(PyNestMLParser.COLON) - self.state = 394 + self.state = 406 self.block() except RecognitionException as re: localctx.exception = re @@ -2878,11 +2919,11 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: self.enterOuterAlt(localctx, 1) - self.state = 396 + self.state = 408 self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 397 + self.state = 409 self.match(PyNestMLParser.COLON) - self.state = 398 + self.state = 410 self.block() except RecognitionException as re: localctx.exception = re @@ -2961,39 +3002,39 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 400 
+ self.state = 412 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 401 + self.state = 413 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 402 + self.state = 414 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 403 + self.state = 415 localctx.start_from = self.expression(0) - self.state = 404 + self.state = 416 self.match(PyNestMLParser.ELLIPSIS) - self.state = 405 + self.state = 417 localctx.end_at = self.expression(0) - self.state = 406 + self.state = 418 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 408 + self.state = 420 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.MINUS: - self.state = 407 + self.state = 419 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 410 + self.state = 422 _la = self._input.LA(1) if not(_la==PyNestMLParser.UNSIGNED_INTEGER or _la==PyNestMLParser.FLOAT): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 411 + self.state = 423 self.match(PyNestMLParser.COLON) - self.state = 412 + self.state = 424 self.block() except RecognitionException as re: localctx.exception = re @@ -3043,13 +3084,13 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 414 + self.state = 426 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 415 + self.state = 427 self.expression(0) - self.state = 416 + self.state = 428 self.match(PyNestMLParser.COLON) - self.state = 417 + self.state = 429 self.block() except RecognitionException as re: localctx.exception = re @@ -3109,35 +3150,35 @@ def nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 422 + self.state = 434 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 422 + self.state = 434 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.NEURON_KEYWORD]: - self.state = 419 + self.state = 431 self.neuron() pass elif token in [PyNestMLParser.SYNAPSE_KEYWORD]: - self.state = 420 + self.state = 432 self.synapse() pass elif token in [PyNestMLParser.NEWLINE]: - self.state = 421 + self.state = 433 self.match(PyNestMLParser.NEWLINE) pass else: raise NoViableAltException(self) - self.state = 424 + self.state = 436 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.NEWLINE) | (1 << PyNestMLParser.NEURON_KEYWORD) | (1 << PyNestMLParser.SYNAPSE_KEYWORD))) != 0)): break - self.state = 426 + self.state = 438 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3183,11 +3224,11 @@ def neuron(self): self.enterRule(localctx, 64, self.RULE_neuron) try: self.enterOuterAlt(localctx, 1) - self.state = 428 + self.state = 440 self.match(PyNestMLParser.NEURON_KEYWORD) - self.state = 429 + self.state = 441 self.match(PyNestMLParser.NAME) - self.state = 430 + self.state = 442 self.neuronBody() except RecognitionException as re: localctx.exception = re @@ -3278,53 +3319,53 @@ def neuronBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 432 + self.state = 444 self.match(PyNestMLParser.COLON) - self.state = 433 + self.state = 445 self.match(PyNestMLParser.NEWLINE) - self.state = 434 + self.state = 446 self.match(PyNestMLParser.INDENT) - self.state = 441 + self.state = 453 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 441 + self.state = 453 self._errHandler.sync(self) 
token = self._input.LA(1) if token in [PyNestMLParser.STATE_KEYWORD, PyNestMLParser.PARAMETERS_KEYWORD, PyNestMLParser.INTERNALS_KEYWORD]: - self.state = 435 + self.state = 447 self.blockWithVariables() pass elif token in [PyNestMLParser.EQUATIONS_KEYWORD]: - self.state = 436 + self.state = 448 self.equationsBlock() pass elif token in [PyNestMLParser.INPUT_KEYWORD]: - self.state = 437 + self.state = 449 self.inputBlock() pass elif token in [PyNestMLParser.OUTPUT_KEYWORD]: - self.state = 438 + self.state = 450 self.outputBlock() pass elif token in [PyNestMLParser.UPDATE_KEYWORD]: - self.state = 439 + self.state = 451 self.updateBlock() pass elif token in [PyNestMLParser.FUNCTION_KEYWORD]: - self.state = 440 + self.state = 452 self.function() pass else: raise NoViableAltException(self) - self.state = 443 + self.state = 455 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.FUNCTION_KEYWORD) | (1 << PyNestMLParser.STATE_KEYWORD) | (1 << PyNestMLParser.PARAMETERS_KEYWORD) | (1 << PyNestMLParser.INTERNALS_KEYWORD) | (1 << PyNestMLParser.UPDATE_KEYWORD) | (1 << PyNestMLParser.EQUATIONS_KEYWORD) | (1 << PyNestMLParser.INPUT_KEYWORD) | (1 << PyNestMLParser.OUTPUT_KEYWORD))) != 0)): break - self.state = 445 + self.state = 457 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3373,13 +3414,13 @@ def synapse(self): self.enterRule(localctx, 68, self.RULE_synapse) try: self.enterOuterAlt(localctx, 1) - self.state = 447 + self.state = 459 self.match(PyNestMLParser.SYNAPSE_KEYWORD) - self.state = 448 + self.state = 460 self.match(PyNestMLParser.NAME) - self.state = 449 + self.state = 461 self.match(PyNestMLParser.COLON) - self.state = 450 + self.state = 462 self.synapseBody() except RecognitionException as re: localctx.exception = re @@ -3474,55 +3515,55 @@ def synapseBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 452 + self.state = 464 self.match(PyNestMLParser.NEWLINE) - self.state = 453 + self.state = 465 self.match(PyNestMLParser.INDENT) - self.state = 461 + self.state = 473 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 461 + self.state = 473 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.STATE_KEYWORD, PyNestMLParser.PARAMETERS_KEYWORD, PyNestMLParser.INTERNALS_KEYWORD]: - self.state = 454 + self.state = 466 self.blockWithVariables() pass elif token in [PyNestMLParser.EQUATIONS_KEYWORD]: - self.state = 455 + self.state = 467 self.equationsBlock() pass elif token in [PyNestMLParser.INPUT_KEYWORD]: - self.state = 456 + self.state = 468 self.inputBlock() pass elif token in [PyNestMLParser.OUTPUT_KEYWORD]: - self.state = 457 + self.state = 469 self.outputBlock() pass elif token in [PyNestMLParser.FUNCTION_KEYWORD]: - self.state = 458 + self.state = 470 self.function() pass elif token in [PyNestMLParser.ON_RECEIVE_KEYWORD]: - self.state = 459 + self.state = 471 self.onReceiveBlock() pass elif token in [PyNestMLParser.UPDATE_KEYWORD]: - self.state = 460 + self.state = 472 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 463 + self.state = 475 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.FUNCTION_KEYWORD) | (1 << PyNestMLParser.STATE_KEYWORD) | (1 << PyNestMLParser.PARAMETERS_KEYWORD) | (1 << PyNestMLParser.INTERNALS_KEYWORD) | (1 << PyNestMLParser.UPDATE_KEYWORD) | (1 << 
PyNestMLParser.EQUATIONS_KEYWORD) | (1 << PyNestMLParser.INPUT_KEYWORD) | (1 << PyNestMLParser.OUTPUT_KEYWORD) | (1 << PyNestMLParser.ON_RECEIVE_KEYWORD))) != 0)): break - self.state = 465 + self.state = 477 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3592,29 +3633,29 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 467 + self.state = 479 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 468 + self.state = 480 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 469 + self.state = 481 localctx.inputPortName = self.match(PyNestMLParser.NAME) - self.state = 474 + self.state = 486 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PyNestMLParser.COMMA: - self.state = 470 + self.state = 482 self.match(PyNestMLParser.COMMA) - self.state = 471 + self.state = 483 self.constParameter() - self.state = 476 + self.state = 488 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 477 + self.state = 489 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 478 + self.state = 490 self.match(PyNestMLParser.COLON) - self.state = 479 + self.state = 491 self.block() except RecognitionException as re: localctx.exception = re @@ -3680,7 +3721,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 481 + self.state = 493 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.STATE_KEYWORD) | (1 << PyNestMLParser.PARAMETERS_KEYWORD) | (1 << PyNestMLParser.INTERNALS_KEYWORD))) != 0)): @@ -3688,25 +3729,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 482 + self.state = 494 self.match(PyNestMLParser.COLON) - self.state = 483 + self.state = 495 self.match(PyNestMLParser.NEWLINE) - self.state = 484 + self.state = 496 self.match(PyNestMLParser.INDENT) - self.state = 486 + self.state = 498 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 485 + self.state = 497 self.declaration_newline() - self.state = 488 + self.state = 500 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PyNestMLParser.INLINE_KEYWORD or _la==PyNestMLParser.RECORDABLE_KEYWORD or _la==PyNestMLParser.NAME): break - self.state = 490 + self.state = 502 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3752,11 +3793,11 @@ def updateBlock(self): self.enterRule(localctx, 76, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 492 + self.state = 504 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 493 + self.state = 505 self.match(PyNestMLParser.COLON) - self.state = 494 + self.state = 506 self.block() except RecognitionException as re: localctx.exception = re @@ -3829,43 +3870,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 496 + self.state = 508 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 497 + self.state = 509 self.match(PyNestMLParser.COLON) - self.state = 498 + self.state = 510 self.match(PyNestMLParser.NEWLINE) - self.state = 499 + self.state = 511 self.match(PyNestMLParser.INDENT) - self.state = 503 + self.state = 515 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 503 + self.state = 515 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.INLINE_KEYWORD, 
PyNestMLParser.RECORDABLE_KEYWORD]: - self.state = 500 + self.state = 512 self.inlineExpression() pass elif token in [PyNestMLParser.NAME]: - self.state = 501 + self.state = 513 self.odeEquation() pass elif token in [PyNestMLParser.KERNEL_KEYWORD]: - self.state = 502 + self.state = 514 self.kernel() pass else: raise NoViableAltException(self) - self.state = 505 + self.state = 517 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INLINE_KEYWORD) | (1 << PyNestMLParser.RECORDABLE_KEYWORD) | (1 << PyNestMLParser.KERNEL_KEYWORD))) != 0) or _la==PyNestMLParser.NAME): break - self.state = 507 + self.state = 519 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3931,39 +3972,39 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 509 + self.state = 521 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 510 + self.state = 522 self.match(PyNestMLParser.COLON) - self.state = 511 + self.state = 523 self.match(PyNestMLParser.NEWLINE) - self.state = 512 + self.state = 524 self.match(PyNestMLParser.INDENT) - self.state = 515 + self.state = 527 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 515 + self.state = 527 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,52,self._ctx) + la_ = self._interp.adaptivePredict(self._input,54,self._ctx) if la_ == 1: - self.state = 513 + self.state = 525 self.spikeInputPort() pass elif la_ == 2: - self.state = 514 + self.state = 526 self.continuousInputPort() pass - self.state = 517 + self.state = 529 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PyNestMLParser.NAME): break - self.state = 519 + self.state = 531 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4031,35 +4072,35 @@ def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 521 + self.state = 533 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 526 + self.state = 538 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.LEFT_SQUARE_BRACKET: - self.state = 522 + self.state = 534 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 523 + self.state = 535 localctx.sizeParameter = self.expression(0) - self.state = 524 + self.state = 536 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 528 + self.state = 540 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 532 + self.state = 544 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PyNestMLParser.INHIBITORY_KEYWORD or _la==PyNestMLParser.EXCITATORY_KEYWORD: - self.state = 529 + self.state = 541 self.inputQualifier() - self.state = 534 + self.state = 546 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 535 + self.state = 547 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 536 + self.state = 548 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4124,27 +4165,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 538 + self.state = 550 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 543 + self.state = 555 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.LEFT_SQUARE_BRACKET: - self.state = 539 + self.state = 551 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 540 + 
self.state = 552 localctx.sizeParameter = self.expression(0) - self.state = 541 + self.state = 553 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 545 + self.state = 557 self.dataType() - self.state = 546 + self.state = 558 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 547 + self.state = 559 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 548 + self.state = 560 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4188,15 +4229,15 @@ def inputQualifier(self): self.enterRule(localctx, 86, self.RULE_inputQualifier) try: self.enterOuterAlt(localctx, 1) - self.state = 552 + self.state = 564 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.INHIBITORY_KEYWORD]: - self.state = 550 + self.state = 562 localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) pass elif token in [PyNestMLParser.EXCITATORY_KEYWORD]: - self.state = 551 + self.state = 563 localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) pass else: @@ -4262,31 +4303,31 @@ def outputBlock(self): self.enterRule(localctx, 88, self.RULE_outputBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 554 + self.state = 566 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 555 + self.state = 567 self.match(PyNestMLParser.COLON) - self.state = 556 + self.state = 568 self.match(PyNestMLParser.NEWLINE) - self.state = 557 + self.state = 569 self.match(PyNestMLParser.INDENT) - self.state = 560 + self.state = 572 self._errHandler.sync(self) token = self._input.LA(1) if token in [PyNestMLParser.SPIKE_KEYWORD]: - self.state = 558 + self.state = 570 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass elif token in [PyNestMLParser.CONTINUOUS_KEYWORD]: - self.state = 559 + self.state = 571 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 562 + self.state = 574 self.match(PyNestMLParser.NEWLINE) - self.state = 563 + self.state = 575 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4360,45 +4401,45 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 565 + self.state = 577 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 566 + self.state = 578 self.match(PyNestMLParser.NAME) - self.state = 567 + self.state = 579 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 576 + self.state = 588 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PyNestMLParser.NAME: - self.state = 568 + self.state = 580 self.parameter() - self.state = 573 + self.state = 585 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PyNestMLParser.COMMA: - self.state = 569 + self.state = 581 self.match(PyNestMLParser.COMMA) - self.state = 570 + self.state = 582 self.parameter() - self.state = 575 + self.state = 587 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 578 + self.state = 590 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 580 + self.state = 592 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INTEGER_KEYWORD) | (1 << PyNestMLParser.REAL_KEYWORD) | (1 << PyNestMLParser.STRING_KEYWORD) | (1 << PyNestMLParser.BOOLEAN_KEYWORD) | (1 << PyNestMLParser.VOID_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN))) != 0) or _la==PyNestMLParser.NAME or _la==PyNestMLParser.UNSIGNED_INTEGER: - self.state = 579 + self.state = 591 
localctx.returnType = self.dataType() - self.state = 582 + self.state = 594 self.match(PyNestMLParser.COLON) - self.state = 583 + self.state = 595 self.block() except RecognitionException as re: localctx.exception = re @@ -4441,9 +4482,9 @@ def parameter(self): self.enterRule(localctx, 92, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 585 + self.state = 597 self.match(PyNestMLParser.NAME) - self.state = 586 + self.state = 598 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4503,11 +4544,11 @@ def constParameter(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 588 + self.state = 600 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 589 + self.state = 601 self.match(PyNestMLParser.EQUALS) - self.state = 590 + self.state = 602 localctx.value = self._input.LT(1) _la = self._input.LA(1) if not(_la==PyNestMLParser.INF_KEYWORD or ((((_la - 86)) & ~0x3f) == 0 and ((1 << (_la - 86)) & ((1 << (PyNestMLParser.BOOLEAN_LITERAL - 86)) | (1 << (PyNestMLParser.STRING_LITERAL - 86)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 86)) | (1 << (PyNestMLParser.FLOAT - 86)))) != 0)): diff --git a/pynestml/generated/PyNestMLParserVisitor.py b/pynestml/generated/PyNestMLParserVisitor.py index bddb67c0a..0ddfa745d 100644 --- a/pynestml/generated/PyNestMLParserVisitor.py +++ b/pynestml/generated/PyNestMLParserVisitor.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.10 +# Generated from PyNestMLParser.g4 by ANTLR 4.10.1 from antlr4 import * if __name__ is not None and "." in __name__: from .PyNestMLParser import PyNestMLParser diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index f0d703e3b..4039e9eec 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -121,9 +121,9 @@ parser grammar PyNestMLParser; * Equations-Language *********************************************************************************************************************/ - inlineExpression : (recordable=RECORDABLE_KEYWORD)? INLINE_KEYWORD variableName=NAME dataType EQUALS expression (SEMICOLON)? NEWLINE; + inlineExpression : (recordable=RECORDABLE_KEYWORD)? INLINE_KEYWORD variableName=NAME dataType EQUALS expression (SEMICOLON)? decorator=anyDecorator* NEWLINE; - odeEquation : lhs=variable EQUALS rhs=expression (SEMICOLON)? NEWLINE; + odeEquation : lhs=variable EQUALS rhs=expression (SEMICOLON)? decorator=anyDecorator* NEWLINE; kernel : KERNEL_KEYWORD variable EQUALS expression (KERNEL_JOINING variable EQUALS expression)* SEMICOLON? NEWLINE; diff --git a/pynestml/meta_model/ast_inline_expression.py b/pynestml/meta_model/ast_inline_expression.py index 8aa3ba253..88a56f162 100644 --- a/pynestml/meta_model/ast_inline_expression.py +++ b/pynestml/meta_model/ast_inline_expression.py @@ -20,6 +20,7 @@ # along with NEST. If not, see . from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_namespace_decorator import ASTNamespaceDecorator class ASTInlineExpression(ASTNode): @@ -35,7 +36,7 @@ class ASTInlineExpression(ASTNode): expression = None """ - def __init__(self, is_recordable=False, variable_name=None, data_type=None, expression=None, *args, **kwargs): + def __init__(self, is_recordable=False, variable_name=None, data_type=None, expression=None, decorators=None, *args, **kwargs): """ Standard constructor. 
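For orientation, the grammar rules amended above and the decorator handling added to ASTInlineExpression (this hunk and the following ones) and ASTOdeEquation can be pictured with a minimal sketch. The NESTML snippet and the helper below are illustrative only: the model variable names, the string constant and the function name are not part of this patch. What is taken from the patch is the placement of decorators after the right-hand-side expression, the get_decorators() accessor introduced in the next hunks, and the "mechanism::<type>" membership test, which mirrors the one used further down by ASTMechanismInformationCollector.detect_mechs().

    # Illustrative NESTML accepted by the amended inlineExpression / odeEquation rules:
    # a decorator such as "@mechanism::channel" may now follow the right-hand-side
    # expression (and the optional semicolon) before the newline.
    DECORATED_EQUATIONS_BLOCK = """
    equations:
        inline Ca_HVA real = gbar_Ca * m_Ca**2 * h_Ca * (e_Ca - v_comp) @mechanism::channel
        c_Ca' = (c_Ca_inf - c_Ca) / tau_Ca @mechanism::concentration
    """

    def is_mechanism_of_type(equations_entry, mech_type: str) -> bool:
        """Sketch: recognise a decorated equations-block entry (ASTInlineExpression or
        ASTOdeEquation) as a mechanism of the given type, using the qualified
        namespace::name form of its decorators (cf. detect_mechs() later in this patch)."""
        qualified_names = [d.namespace + "::" + d.name for d in equations_entry.get_decorators()]
        return "mechanism::" + mech_type in qualified_names

The mechanism type names checked by the compartmental backend in this patch are "channel", "concentration" and "receptor"; any other identifiers in the sketch are hypothetical.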
@@ -51,10 +52,13 @@ def __init__(self, is_recordable=False, variable_name=None, data_type=None, expr :type expression: ASTExpression """ super(ASTInlineExpression, self).__init__(*args, **kwargs) + if decorators is None: + decorators = [] self.is_recordable = is_recordable self.variable_name = variable_name self.data_type = data_type self.expression = expression + self.decorators = decorators def clone(self): """ @@ -69,10 +73,15 @@ def clone(self): expression_dup = None if self.expression: expression_dup = self.expression.clone() + decorators_dup = None + if self.decorators: + decorators_dup = [dec.clone() if isinstance(dec, ASTNamespaceDecorator) else str(dec) for dec in + self.decorators] dup = ASTInlineExpression(is_recordable=self.is_recordable, variable_name=self.variable_name, data_type=data_type_dup, expression=expression_dup, + decorators=decorators_dup, # ASTNode common attributes: source_position=self.source_position, scope=self.scope, @@ -83,6 +92,11 @@ def clone(self): return dup + def get_decorators(self): + """ + """ + return self.decorators + def get_variable_name(self): """ Returns the variable name. diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index 0b16e29cd..aeedfd07e 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -283,15 +283,15 @@ def create_ast_synapse(cls, name, body, source_position, artifact_name): return ASTSynapse(name, body, artifact_name=artifact_name, source_position=source_position) @classmethod - def create_ast_ode_equation(cls, lhs, rhs, source_position): - # type: (ASTVariable,ASTSimpleExpression|ASTExpression,ASTSourceLocation) -> ASTOdeEquation - return ASTOdeEquation(lhs, rhs, source_position=source_position) + def create_ast_ode_equation(cls, lhs, rhs, source_position, decorators=None): + # type: (ASTVariable,ASTSimpleExpression|ASTExpression,ASTSourceLocation,Optional[List]) -> ASTOdeEquation + return ASTOdeEquation(lhs, rhs, source_position=source_position, decorators=decorators) @classmethod - def create_ast_inline_expression(cls, variable_name, data_type, expression, source_position, is_recordable=False): - # type: (str,ASTDataType,ASTExpression|ASTSimpleExpression,ASTSourceLocation,bool) -> ASTInlineExpression + def create_ast_inline_expression(cls, variable_name, data_type, expression, source_position, is_recordable=False, decorators: Optional[list] = None): + # type: (str,ASTDataType,ASTExpression|ASTSimpleExpression,ASTSourceLocation,bool,list) -> ASTInlineExpression return ASTInlineExpression(variable_name=variable_name, data_type=data_type, expression=expression, - is_recordable=is_recordable, source_position=source_position) + is_recordable=is_recordable, source_position=source_position, decorators=decorators) @classmethod def create_ast_kernel(cls, variables=None, expressions=None, source_position=None): diff --git a/pynestml/meta_model/ast_ode_equation.py b/pynestml/meta_model/ast_ode_equation.py index 01cae9251..20151a0e7 100644 --- a/pynestml/meta_model/ast_ode_equation.py +++ b/pynestml/meta_model/ast_ode_equation.py @@ -24,6 +24,7 @@ from pynestml.meta_model.ast_expression import ASTExpression from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.meta_model.ast_variable import ASTVariable +from pynestml.meta_model.ast_namespace_decorator import ASTNamespaceDecorator class ASTOdeEquation(ASTNode): @@ -40,7 +41,7 @@ class ASTOdeEquation(ASTNode): rhs = None """ - def __init__(self, lhs, rhs, *args, 
**kwargs): + def __init__(self, lhs, rhs, decorators=None, *args, **kwargs): """ Standard constructor. @@ -54,8 +55,11 @@ def __init__(self, lhs, rhs, *args, **kwargs): super(ASTOdeEquation, self).__init__(*args, **kwargs) assert isinstance(lhs, ASTVariable) assert isinstance(rhs, ASTExpression) or isinstance(rhs, ASTSimpleExpression) + if decorators is None: + decorators = [] self.lhs = lhs self.rhs = rhs + self.decorators = decorators def clone(self): """ @@ -64,8 +68,14 @@ def clone(self): :return: new AST node instance :rtype: ASTOdeEquation """ + decorators_dup = None + if self.decorators: + decorators_dup = [dec.clone() if isinstance(dec, ASTNamespaceDecorator) else str(dec) for dec in + self.decorators] + dup = ASTOdeEquation(lhs=self.lhs.clone(), rhs=self.rhs.clone(), + decorators=decorators_dup, # ASTNode common attributes: source_position=self.source_position, scope=self.scope, @@ -76,6 +86,11 @@ def clone(self): return dup + def get_decorators(self): + """ + """ + return self.decorators + def get_lhs(self): """ Returns the left-hand side of the equation. diff --git a/pynestml/utils/ast_mechanism_information_collector.py b/pynestml/utils/ast_mechanism_information_collector.py new file mode 100644 index 000000000..1b994a42f --- /dev/null +++ b/pynestml/utils/ast_mechanism_information_collector.py @@ -0,0 +1,457 @@ +# -*- coding: utf-8 -*- +# +# ast_mechanism_information_collector.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.frontend.frontend_configuration import FrontendConfiguration +from collections import defaultdict +from pynestml.visitors.ast_visitor import ASTVisitor + + +class ASTMechanismInformationCollector(object): + """This class contains all basic mechanism information collection. 
Further collectors may be implemented to collect + further information for specific mechanism types (example: ASTSynapseInformationCollector)""" + collector_visitor = None + neuron = None + + @classmethod + def __init__(cls, neuron): + cls.neuron = neuron + cls.collector_visitor = ASTMechanismInformationCollectorVisitor() + neuron.accept(cls.collector_visitor) + + @classmethod + def detect_mechs(cls, mech_type: str): + """Detects the root expressions (either ode or inline) of the given type and returns the initial + info dictionary""" + mechs_info = defaultdict() + if not FrontendConfiguration.get_target_platform().upper() == 'NEST_COMPARTMENTAL': + return mechs_info + + mechanism_expressions = cls.collector_visitor.inlinesInEquationsBlock + for mechanism_expression in mechanism_expressions: + if "mechanism::" + mech_type in [(e.namespace + "::" + e.name) for e in + mechanism_expression.get_decorators()]: + mechanism_name = mechanism_expression.variable_name + mechs_info[mechanism_name] = defaultdict() + mechs_info[mechanism_name]["root_expression"] = mechanism_expression + + mechanism_expressions = cls.collector_visitor.odes + for mechanism_expression in mechanism_expressions: + if "mechanism::" + mech_type in [(e.namespace + "::" + e.name) for e in + mechanism_expression.get_decorators()]: + mechanism_name = mechanism_expression.lhs.name + mechs_info[mechanism_name] = defaultdict() + mechs_info[mechanism_name]["root_expression"] = mechanism_expression + + return mechs_info + + @classmethod + def extend_variable_list_name_based_restricted(cls, extended_list, appending_list, restrictor_list): + """go through appending_list and append every variable that is not in restrictor_list to extended_list for the + purpose of not re-searching the same variable""" + for app_item in appending_list: + appendable = True + for rest_item in restrictor_list: + if rest_item.name == app_item.name: + appendable = False + break + if appendable: + extended_list.append(app_item) + + return extended_list + + @classmethod + def extend_function_call_list_name_based_restricted(cls, extended_list, appending_list, restrictor_list): + """go through appending_list and append every variable that is not in restrictor_list to extended_list for the + purpose of not re-searching the same function""" + for app_item in appending_list: + appendable = True + for rest_item in restrictor_list: + if rest_item.callee_name == app_item.callee_name: + appendable = False + break + if appendable: + extended_list.append(app_item) + + return extended_list + + @classmethod + def extend_variables_with_initialisations(cls, neuron, mechs_info): + """collects initialization expressions for all variables and parameters contained in mechs_info""" + for mechanism_name, mechanism_info in mechs_info.items(): + var_init_visitor = VariableInitializationVisitor(mechanism_info) + neuron.accept(var_init_visitor) + mechs_info[mechanism_name]["States"] = var_init_visitor.states + mechs_info[mechanism_name]["Parameters"] = var_init_visitor.parameters + + return mechs_info + + @classmethod + def collect_mechanism_related_definitions(cls, neuron, mechs_info): + """Collects all parts of the nestml code the root expressions previously collected depend on. 
search + is cut at other mechanisms root expressions""" + from pynestml.meta_model.ast_inline_expression import ASTInlineExpression + from pynestml.meta_model.ast_ode_equation import ASTOdeEquation + + for mechanism_name, mechanism_info in mechs_info.items(): + variable_collector = ASTVariableCollectorVisitor() + neuron.accept(variable_collector) + global_states = variable_collector.all_states + global_parameters = variable_collector.all_parameters + + function_collector = ASTFunctionCollectorVisitor() + neuron.accept(function_collector) + global_functions = function_collector.all_functions + + inline_collector = ASTInlineEquationCollectorVisitor() + neuron.accept(inline_collector) + global_inlines = inline_collector.all_inlines + + ode_collector = ASTODEEquationCollectorVisitor() + neuron.accept(ode_collector) + global_odes = ode_collector.all_ode_equations + + kernel_collector = ASTKernelCollectorVisitor() + neuron.accept(kernel_collector) + global_kernels = kernel_collector.all_kernels + + mechanism_states = list() + mechanism_parameters = list() + mechanism_functions = list() + mechanism_inlines = list() + mechanism_odes = list() + synapse_kernels = list() + mechanism_dependencies = defaultdict() + mechanism_dependencies["concentrations"] = list() + mechanism_dependencies["channels"] = list() + mechanism_dependencies["receptors"] = list() + + mechanism_inlines.append(mechs_info[mechanism_name]["root_expression"]) + + search_variables = list() + search_functions = list() + + found_variables = list() + found_functions = list() + + local_variable_collector = ASTVariableCollectorVisitor() + mechanism_inlines[0].accept(local_variable_collector) + search_variables = local_variable_collector.all_variables + + local_function_call_collector = ASTFunctionCallCollectorVisitor() + mechanism_inlines[0].accept(local_function_call_collector) + search_functions = local_function_call_collector.all_function_calls + + while len(search_functions) > 0 or len(search_variables) > 0: + if len(search_functions) > 0: + function_call = search_functions[0] + for function in global_functions: + if function.name == function_call.callee_name: + mechanism_functions.append(function) + found_functions.append(function_call) + + local_variable_collector = ASTVariableCollectorVisitor() + function.accept(local_variable_collector) + search_variables = cls.extend_variable_list_name_based_restricted(search_variables, + local_variable_collector.all_variables, + search_variables + found_variables) + + local_function_call_collector = ASTFunctionCallCollectorVisitor() + function.accept(local_function_call_collector) + search_functions = cls.extend_function_call_list_name_based_restricted(search_functions, + local_function_call_collector.all_function_calls, + search_functions + found_functions) + # IMPLEMENT CATCH NONDEFINED!!! 
+ search_functions.remove(function_call) + + elif len(search_variables) > 0: + variable = search_variables[0] + if not variable.name == "v_comp": + is_dependency = False + for inline in global_inlines: + if variable.name == inline.variable_name: + if isinstance(inline.get_decorators(), list): + if "mechanism" in [e.namespace for e in inline.get_decorators()]: + is_dependency = True + if not (isinstance(mechanism_info["root_expression"], ASTInlineExpression) and inline.variable_name == mechanism_info["root_expression"].variable_name): + if "channel" in [e.name for e in inline.get_decorators()]: + if not inline.variable_name in [i.variable_name for i in + mechanism_dependencies["channels"]]: + mechanism_dependencies["channels"].append(inline) + if "receptor" in [e.name for e in inline.get_decorators()]: + if not inline.variable_name in [i.variable_name for i in + mechanism_dependencies["receptors"]]: + mechanism_dependencies["receptors"].append(inline) + + if not is_dependency: + mechanism_inlines.append(inline) + + local_variable_collector = ASTVariableCollectorVisitor() + inline.accept(local_variable_collector) + search_variables = cls.extend_variable_list_name_based_restricted(search_variables, + local_variable_collector.all_variables, + search_variables + found_variables) + + local_function_call_collector = ASTFunctionCallCollectorVisitor() + inline.accept(local_function_call_collector) + search_functions = cls.extend_function_call_list_name_based_restricted( + search_functions, + local_function_call_collector.all_function_calls, + search_functions + found_functions) + + for ode in global_odes: + if variable.name == ode.lhs.name: + if isinstance(ode.get_decorators(), list): + if "mechanism" in [e.namespace for e in ode.get_decorators()]: + is_dependency = True + if not (isinstance(mechanism_info["root_expression"], ASTOdeEquation) and ode.lhs.name == mechanism_info["root_expression"].lhs.name): + if "concentration" in [e.name for e in ode.get_decorators()]: + if not ode.lhs.name in [o.lhs.name for o in + mechanism_dependencies["concentrations"]]: + mechanism_dependencies["concentrations"].append(ode) + + if not is_dependency: + mechanism_odes.append(ode) + + local_variable_collector = ASTVariableCollectorVisitor() + ode.accept(local_variable_collector) + search_variables = cls.extend_variable_list_name_based_restricted(search_variables, + local_variable_collector.all_variables, + search_variables + found_variables) + + local_function_call_collector = ASTFunctionCallCollectorVisitor() + ode.accept(local_function_call_collector) + search_functions = cls.extend_function_call_list_name_based_restricted( + search_functions, + local_function_call_collector.all_function_calls, + search_functions + found_functions) + + for state in global_states: + if variable.name == state.name and not is_dependency: + mechanism_states.append(state) + + for parameter in global_parameters: + if variable.name == parameter.name: + mechanism_parameters.append(parameter) + + for kernel in global_kernels: + if variable.name == kernel.get_variables()[0].name: + synapse_kernels.append(kernel) + + local_variable_collector = ASTVariableCollectorVisitor() + kernel.accept(local_variable_collector) + search_variables = cls.extend_variable_list_name_based_restricted(search_variables, + local_variable_collector.all_variables, + search_variables + found_variables) + + local_function_call_collector = ASTFunctionCallCollectorVisitor() + kernel.accept(local_function_call_collector) + search_functions = 
cls.extend_function_call_list_name_based_restricted(search_functions, + local_function_call_collector.all_function_calls, + search_functions + found_functions) + + search_variables.remove(variable) + found_variables.append(variable) + # IMPLEMENT CATCH NONDEFINED!!! + + mechs_info[mechanism_name]["States"] = mechanism_states + mechs_info[mechanism_name]["Parameters"] = mechanism_parameters + mechs_info[mechanism_name]["Functions"] = mechanism_functions + mechs_info[mechanism_name]["SecondaryInlineExpressions"] = mechanism_inlines + mechs_info[mechanism_name]["ODEs"] = mechanism_odes + mechs_info[mechanism_name]["Dependencies"] = mechanism_dependencies + + return mechs_info + + +class ASTMechanismInformationCollectorVisitor(ASTVisitor): + + def __init__(self): + super(ASTMechanismInformationCollectorVisitor, self).__init__() + self.inEquationsBlock = False + self.inlinesInEquationsBlock = list() + self.odes = list() + + def visit_equations_block(self, node): + self.inEquationsBlock = True + + def endvisit_equations_block(self, node): + self.inEquationsBlock = False + + def visit_inline_expression(self, node): + if self.inEquationsBlock: + self.inlinesInEquationsBlock.append(node) + + def visit_ode_equation(self, node): + self.odes.append(node) + + +# Helper collectors: +class VariableInitializationVisitor(ASTVisitor): + def __init__(self, channel_info): + super(VariableInitializationVisitor, self).__init__() + self.inside_variable = False + self.inside_declaration = False + self.inside_parameter_block = False + self.inside_state_block = False + self.current_declaration = None + self.states = defaultdict() + self.parameters = defaultdict() + self.channel_info = channel_info + + def visit_declaration(self, node): + self.inside_declaration = True + self.current_declaration = node + + def endvisit_declaration(self, node): + self.inside_declaration = False + self.current_declaration = None + + def visit_block_with_variables(self, node): + if node.is_state: + self.inside_state_block = True + if node.is_parameters: + self.inside_parameter_block = True + + def endvisit_block_with_variables(self, node): + self.inside_state_block = False + self.inside_parameter_block = False + + def visit_variable(self, node): + self.inside_variable = True + if self.inside_state_block and self.inside_declaration: + if any(node.name == variable.name for variable in self.channel_info["States"]): + self.states[node.name] = defaultdict() + self.states[node.name]["ASTVariable"] = node.clone() + self.states[node.name]["rhs_expression"] = self.current_declaration.get_expression() + + if self.inside_parameter_block and self.inside_declaration: + if any(node.name == variable.name for variable in self.channel_info["Parameters"]): + self.parameters[node.name] = defaultdict() + self.parameters[node.name]["ASTVariable"] = node.clone() + self.parameters[node.name]["rhs_expression"] = self.current_declaration.get_expression() + + def endvisit_variable(self, node): + self.inside_variable = False + + +class ASTODEEquationCollectorVisitor(ASTVisitor): + def __init__(self): + super(ASTODEEquationCollectorVisitor, self).__init__() + self.inside_ode_expression = False + self.all_ode_equations = list() + + def visit_ode_equation(self, node): + self.inside_ode_expression = True + self.all_ode_equations.append(node.clone()) + + def endvisit_ode_equation(self, node): + self.inside_ode_expression = False + + +class ASTVariableCollectorVisitor(ASTVisitor): + def __init__(self): + super(ASTVariableCollectorVisitor, self).__init__() + 
self.inside_variable = False + self.inside_block_with_variables = False + self.all_states = list() + self.all_parameters = list() + self.inside_states_block = False + self.inside_parameters_block = False + self.all_variables = list() + + def visit_block_with_variables(self, node): + self.inside_block_with_variables = True + if node.is_state: + self.inside_states_block = True + if node.is_parameters: + self.inside_parameters_block = True + + def endvisit_block_with_variables(self, node): + self.inside_states_block = False + self.inside_parameters_block = False + self.inside_block_with_variables = False + + def visit_variable(self, node): + self.inside_variable = True + self.all_variables.append(node.clone()) + if self.inside_states_block: + self.all_states.append(node.clone()) + if self.inside_parameters_block: + self.all_parameters.append(node.clone()) + + def endvisit_variable(self, node): + self.inside_variable = False + + +class ASTFunctionCollectorVisitor(ASTVisitor): + def __init__(self): + super(ASTFunctionCollectorVisitor, self).__init__() + self.inside_function = False + self.all_functions = list() + + def visit_function(self, node): + self.inside_function = True + self.all_functions.append(node.clone()) + + def endvisit_function(self, node): + self.inside_function = False + + +class ASTInlineEquationCollectorVisitor(ASTVisitor): + def __init__(self): + super(ASTInlineEquationCollectorVisitor, self).__init__() + self.inside_inline_expression = False + self.all_inlines = list() + + def visit_inline_expression(self, node): + self.inside_inline_expression = True + self.all_inlines.append(node.clone()) + + def endvisit_inline_expression(self, node): + self.inside_inline_expression = False + + +class ASTFunctionCallCollectorVisitor(ASTVisitor): + def __init__(self): + super(ASTFunctionCallCollectorVisitor, self).__init__() + self.inside_function_call = False + self.all_function_calls = list() + + def visit_function_call(self, node): + self.inside_function_call = True + self.all_function_calls.append(node.clone()) + + def endvisit_function_call(self, node): + self.inside_function_call = False + + +class ASTKernelCollectorVisitor(ASTVisitor): + def __init__(self): + super(ASTKernelCollectorVisitor, self).__init__() + self.inside_kernel = False + self.all_kernels = list() + + def visit_kernel(self, node): + self.inside_kernel = True + self.all_kernels.append(node.clone()) + + def endvisit_kernel(self, node): + self.inside_kernel = False diff --git a/pynestml/utils/ast_synapse_information_collector.py b/pynestml/utils/ast_synapse_information_collector.py new file mode 100644 index 000000000..f5a6763bc --- /dev/null +++ b/pynestml/utils/ast_synapse_information_collector.py @@ -0,0 +1,349 @@ +# -*- coding: utf-8 -*- +# +# ast_synapse_information_collector.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
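To illustrate the decorator matching that ASTMechanismInformationCollector.detect_mechs performs on the collected inlines and ODEs, here is a minimal, self-contained sketch. The Decorator and Expression namedtuples are hypothetical stand-ins for ASTNamespaceDecorator and the inline/ODE AST nodes; only the "mechanism::<type>" string comparison mirrors the code above.

from collections import namedtuple

Decorator = namedtuple("Decorator", ["namespace", "name"])
Expression = namedtuple("Expression", ["variable_name", "decorators"])

def detect_roots(expressions, mech_type):
    # keep every expression tagged with @mechanism::<mech_type> as a root expression
    mechs_info = {}
    for expr in expressions:
        tags = [d.namespace + "::" + d.name for d in expr.decorators]
        if "mechanism::" + mech_type in tags:
            mechs_info[expr.variable_name] = {"root_expression": expr}
    return mechs_info

na_inline = Expression("Na", [Decorator("mechanism", "channel")])
print(detect_roots([na_inline], "channel"))  # {'Na': {'root_expression': Expression(...)}}

In a NESTML model this corresponds to tagging the root inline expression or ODE of a mechanism with a decorator such as @mechanism::channel in the equations block.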
+ +from _collections import defaultdict +import copy + +from pynestml.meta_model.ast_inline_expression import ASTInlineExpression +from pynestml.meta_model.ast_kernel import ASTKernel +from pynestml.symbols.predefined_variables import PredefinedVariables +from pynestml.visitors.ast_visitor import ASTVisitor + + +class ASTSynapseInformationCollector(ASTVisitor): + """ + for each inline expression inside the equations block, + collect all synapse relevant information + + """ + + def __init__(self): + super(ASTSynapseInformationCollector, self).__init__() + + # various dicts to store collected information + self.kernel_name_to_kernel = defaultdict() + self.inline_expression_to_kernel_args = defaultdict(lambda: set()) + self.inline_expression_to_function_calls = defaultdict(lambda: set()) + self.kernel_to_function_calls = defaultdict(lambda: set()) + self.parameter_name_to_declaration = defaultdict(lambda: None) + self.state_name_to_declaration = defaultdict(lambda: None) + self.variable_name_to_declaration = defaultdict(lambda: None) + self.internal_var_name_to_declaration = defaultdict(lambda: None) + self.inline_expression_to_variables = defaultdict(lambda: set()) + self.kernel_to_rhs_variables = defaultdict(lambda: set()) + self.declaration_to_rhs_variables = defaultdict(lambda: set()) + self.input_port_name_to_input_port = defaultdict() + + # traversal states and nodes + self.inside_parameter_block = False + self.inside_state_block = False + self.inside_internals_block = False + self.inside_equations_block = False + self.inside_input_block = False + self.inside_inline_expression = False + self.inside_kernel = False + self.inside_kernel_call = False + self.inside_declaration = False + # self.inside_variable = False + self.inside_simple_expression = False + self.inside_expression = False + # self.inside_function_call = False + + self.current_inline_expression = None + self.current_kernel = None + self.current_expression = None + self.current_simple_expression = None + self.current_declaration = None + # self.current_variable = None + + self.current_synapse_name = None + + def get_state_declaration(self, variable_name): + return self.state_name_to_declaration[variable_name] + + def get_variable_declaration(self, variable_name): + return self.variable_name_to_declaration[variable_name] + + def get_kernel_by_name(self, name: str): + return self.kernel_name_to_kernel[name] + + def get_inline_expressions_with_kernels(self): + return self.inline_expression_to_kernel_args.keys() + + def get_kernel_function_calls(self, kernel: ASTKernel): + return self.kernel_to_function_calls[kernel] + + def get_inline_function_calls(self, inline: ASTInlineExpression): + return self.inline_expression_to_function_calls[inline] + + def get_variable_names_of_synapse(self, synapse_inline: ASTInlineExpression, exclude_names: set = set(), exclude_ignorable=True) -> set: + """extracts all variables specific to a single synapse + (which is defined by the inline expression containing kernels) + independently of what block they are declared in + it also cascades over all right hand side variables until all + variables are included""" + if exclude_ignorable: + exclude_names.update(self.get_variable_names_to_ignore()) + + # find all variables used in the inline + potential_variables = self.inline_expression_to_variables[synapse_inline] + + # find all kernels referenced by the inline + # and collect variables used by those kernels + kernel_arg_pairs = self.get_extracted_kernel_args(synapse_inline) + for kernel_var, spikes_var 
in kernel_arg_pairs: + kernel = self.get_kernel_by_name(kernel_var.get_name()) + potential_variables.update(self.kernel_to_rhs_variables[kernel]) + + # find declarations for all variables and check + # what variables their rhs expressions use + # for example if we have + # a = b * c + # then check if b and c are already in potential_variables + # if not, add those as well + potential_variables_copy = copy.copy(potential_variables) + + potential_variables_prev_count = len(potential_variables) + while True: + for potential_variable in potential_variables_copy: + var_name = potential_variable.get_name() + if var_name in exclude_names: + continue + declaration = self.get_variable_declaration(var_name) + if declaration is None: + continue + variables_referenced = self.declaration_to_rhs_variables[var_name] + potential_variables.update(variables_referenced) + if potential_variables_prev_count == len(potential_variables): + break + potential_variables_prev_count = len(potential_variables) + + # transform variables into their names and filter + # out anything form exclude_names + result = set() + for potential_variable in potential_variables: + var_name = potential_variable.get_name() + if var_name not in exclude_names: + result.add(var_name) + + return result + + @classmethod + def get_variable_names_to_ignore(cls): + return set(PredefinedVariables.get_variables().keys()).union({"v_comp"}) + + def get_synapse_specific_internal_declarations(self, synapse_inline: ASTInlineExpression) -> defaultdict: + synapse_variable_names = self.get_variable_names_of_synapse( + synapse_inline) + + # now match those variable names with + # variable declarations from the internals block + dereferenced = defaultdict() + for potential_internals_name in synapse_variable_names: + if potential_internals_name in self.internal_var_name_to_declaration: + dereferenced[potential_internals_name] = self.internal_var_name_to_declaration[potential_internals_name] + return dereferenced + + def get_synapse_specific_state_declarations(self, synapse_inline: ASTInlineExpression) -> defaultdict: + synapse_variable_names = self.get_variable_names_of_synapse( + synapse_inline) + + # now match those variable names with + # variable declarations from the state block + dereferenced = defaultdict() + for potential_state_name in synapse_variable_names: + if potential_state_name in self.state_name_to_declaration: + dereferenced[potential_state_name] = self.state_name_to_declaration[potential_state_name] + return dereferenced + + def get_synapse_specific_parameter_declarations(self, synapse_inline: ASTInlineExpression) -> defaultdict: + synapse_variable_names = self.get_variable_names_of_synapse( + synapse_inline) + + # now match those variable names with + # variable declarations from the parameter block + dereferenced = defaultdict() + for potential_param_name in synapse_variable_names: + if potential_param_name in self.parameter_name_to_declaration: + dereferenced[potential_param_name] = self.parameter_name_to_declaration[potential_param_name] + return dereferenced + + def get_extracted_kernel_args(self, inline_expression: ASTInlineExpression) -> set: + return self.inline_expression_to_kernel_args[inline_expression] + + def get_basic_kernel_variable_names(self, synapse_inline): + """ + for every occurence of convolve(port, spikes) generate "port__X__spikes" variable + gather those variables for this synapse inline and return their list + + note that those variables will occur as substring in other kernel variables i.e "port__X__spikes__d" 
or "__P__port__X__spikes__port__X__spikes" + + so we can use the result to identify all the other kernel variables related to the + specific synapse inline declaration + """ + order = 0 + results = [] + for syn_inline, args in self.inline_expression_to_kernel_args.items(): + if synapse_inline.variable_name == syn_inline.variable_name: + for kernel_var, spike_var in args: + kernel_name = kernel_var.get_name() + spike_input_port = self.input_port_name_to_input_port[spike_var.get_name( + )] + kernel_variable_name = self.construct_kernel_X_spike_buf_name( + kernel_name, spike_input_port, order) + results.append(kernel_variable_name) + + return results + + def get_used_kernel_names(self, inline_expression: ASTInlineExpression): + return [kernel_var.get_name() for kernel_var, _ in self.get_extracted_kernel_args(inline_expression)] + + def get_input_port_by_name(self, name): + return self.input_port_name_to_input_port[name] + + def get_used_spike_names(self, inline_expression: ASTInlineExpression): + return [spikes_var.get_name() for _, spikes_var in self.get_extracted_kernel_args(inline_expression)] + + def visit_kernel(self, node): + self.current_kernel = node + self.inside_kernel = True + if self.inside_equations_block: + kernel_name = node.get_variables()[0].get_name_of_lhs() + self.kernel_name_to_kernel[kernel_name] = node + + def visit_function_call(self, node): + if self.inside_equations_block: + if self.inside_inline_expression and self.inside_simple_expression: + if node.get_name() == "convolve": + self.inside_kernel_call = True + kernel, spikes = node.get_args() + kernel_var = kernel.get_variables()[0] + spikes_var = spikes.get_variables()[0] + if "mechanism::receptor" in [(e.namespace + "::" + e.name) for e in self.current_inline_expression.get_decorators()]: + self.inline_expression_to_kernel_args[self.current_inline_expression].add( + (kernel_var, spikes_var)) + else: + self.inline_expression_to_function_calls[self.current_inline_expression].add( + node) + if self.inside_kernel and self.inside_simple_expression: + self.kernel_to_function_calls[self.current_kernel].add(node) + + def endvisit_function_call(self, node): + self.inside_kernel_call = False + + def endvisit_kernel(self, node): + self.current_kernel = None + self.inside_kernel = False + + def visit_variable(self, node): + if self.inside_inline_expression and not self.inside_kernel_call: + self.inline_expression_to_variables[self.current_inline_expression].add( + node) + elif self.inside_kernel and (self.inside_expression or self.inside_simple_expression): + self.kernel_to_rhs_variables[self.current_kernel].add(node) + elif self.inside_declaration and self.inside_expression: + declared_variable = self.current_declaration.get_variables()[ + 0].get_name() + self.declaration_to_rhs_variables[declared_variable].add(node) + + def visit_inline_expression(self, node): + self.inside_inline_expression = True + self.current_inline_expression = node + + def endvisit_inline_expression(self, node): + self.inside_inline_expression = False + self.current_inline_expression = None + + def visit_equations_block(self, node): + self.inside_equations_block = True + + def endvisit_equations_block(self, node): + self.inside_equations_block = False + + def visit_input_block(self, node): + self.inside_input_block = True + + def visit_input_port(self, node): + self.input_port_name_to_input_port[node.get_name()] = node + + def endvisit_input_block(self, node): + self.inside_input_block = False + + def visit_block_with_variables(self, node): + if 
node.is_state: + self.inside_state_block = True + if node.is_parameters: + self.inside_parameter_block = True + if node.is_internals: + self.inside_internals_block = True + + def endvisit_block_with_variables(self, node): + if node.is_state: + self.inside_state_block = False + if node.is_parameters: + self.inside_parameter_block = False + if node.is_internals: + self.inside_internals_block = False + + def visit_simple_expression(self, node): + self.inside_simple_expression = True + self.current_simple_expression = node + + def endvisit_simple_expression(self, node): + self.inside_simple_expression = False + self.current_simple_expression = None + + def visit_declaration(self, node): + self.inside_declaration = True + self.current_declaration = node + + # collect decalarations generally + variable_name = node.get_variables()[0].get_name() + self.variable_name_to_declaration[variable_name] = node + + # collect declarations per block + if self.inside_parameter_block: + self.parameter_name_to_declaration[variable_name] = node + elif self.inside_state_block: + self.state_name_to_declaration[variable_name] = node + elif self.inside_internals_block: + self.internal_var_name_to_declaration[variable_name] = node + + def endvisit_declaration(self, node): + self.inside_declaration = False + self.current_declaration = None + + def visit_expression(self, node): + self.inside_expression = True + self.current_expression = node + + def endvisit_expression(self, node): + self.inside_expression = False + self.current_expression = None + + # this method was copied over from ast_transformer + # in order to avoid a circular dependency + @staticmethod + def construct_kernel_X_spike_buf_name(kernel_var_name: str, spike_input_port, order: int, diff_order_symbol="__d"): + assert type(kernel_var_name) is str + assert type(order) is int + assert type(diff_order_symbol) is str + return kernel_var_name.replace("$", "__DOLLAR") + "__X__" + str(spike_input_port) + diff_order_symbol * order diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 0070a5832..1e8b28bb4 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -1240,9 +1240,14 @@ def construct_kernel_X_spike_buf_name(cls, kernel_var_name: str, spike_input_por if isinstance(spike_input_port, ASTSimpleExpression): spike_input_port = spike_input_port.get_variable() - spike_input_port_name = spike_input_port.get_name() - if spike_input_port.has_vector_parameter(): - spike_input_port_name += "_" + str(cls.get_numeric_vector_size(spike_input_port)) + if not isinstance(spike_input_port, str): + spike_input_port_name = spike_input_port.get_name() + else: + spike_input_port_name = spike_input_port + + if isinstance(spike_input_port, ASTVariable): + if spike_input_port.has_vector_parameter(): + spike_input_port_name += "_" + str(cls.get_numeric_vector_size(spike_input_port)) return kernel_var_name.replace("$", "__DOLLAR") + "__X__" + spike_input_port_name + diff_order_symbol * order @@ -1630,7 +1635,7 @@ def update_initial_values_for_odes(cls, neuron: ASTNeuron, solver_dicts: List[di @classmethod def create_initial_values_for_kernels(cls, neuron: ASTNeuron, solver_dicts: List[dict], kernels: List[ASTKernel]) -> None: - r""" + """ Add the variables used in kernels from the ode-toolbox result dictionary as ODEs in NESTML AST """ for solver_dict in solver_dicts: @@ -1658,7 +1663,7 @@ def create_initial_values_for_kernels(cls, neuron: ASTNeuron, solver_dicts: List if differential_order: type_str = "*s**-" + str(differential_order) 
- expr = "0 " + type_str # for kernels, "initial value" returned by ode-toolbox is actually the increment value; the actual initial value is assumed to be 0 + expr = "0 " + type_str # for kernels, "initial value" returned by ode-toolbox is actually the increment value; the actual initial value is 0 (property of the convolution) if not cls.declaration_in_state_block(neuron, var_name): cls.add_declaration_to_state_block(neuron, var_name, expr, type_str) @@ -1802,20 +1807,25 @@ def replace_inline_expressions_through_defining_expressions(cls, definitions: Se from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor for m in inline_expressions: - source_position = m.get_source_position() - for target in definitions: - matcher = re.compile(cls._variable_matching_template.format(m.get_variable_name())) - target_definition = str(target.get_rhs()) - target_definition = re.sub(matcher, "(" + str(m.get_expression()) + ")", target_definition) - target.rhs = ModelParser.parse_expression(target_definition) - target.update_scope(m.get_scope()) - target.accept(ASTSymbolTableVisitor()) - - def log_set_source_position(node): - if node.get_source_position().is_added_source_position(): - node.set_source_position(source_position) - - target.accept(ASTHigherOrderVisitor(visit_funcs=log_set_source_position)) + if "mechanism" not in [e.namespace for e in m.get_decorators()]: + """ + exclude compartmental mechanism definitions in order to have the + inline as a barrier inbetween odes that are meant to be solved independently + """ + source_position = m.get_source_position() + for target in definitions: + matcher = re.compile(cls._variable_matching_template.format(m.get_variable_name())) + target_definition = str(target.get_rhs()) + target_definition = re.sub(matcher, "(" + str(m.get_expression()) + ")", target_definition) + target.rhs = ModelParser.parse_expression(target_definition) + target.update_scope(m.get_scope()) + target.accept(ASTSymbolTableVisitor()) + + def log_set_source_position(node): + if node.get_source_position().is_added_source_position(): + node.set_source_position(source_position) + + target.accept(ASTHigherOrderVisitor(visit_funcs=log_set_source_position)) return definitions diff --git a/pynestml/utils/chan_info_enricher.py b/pynestml/utils/chan_info_enricher.py new file mode 100644 index 000000000..e3f4c1459 --- /dev/null +++ b/pynestml/utils/chan_info_enricher.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# +# chan_info_enricher.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.utils.model_parser import ModelParser +from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor +import sympy + +from pynestml.utils.mechs_info_enricher import MechsInfoEnricher + + +class ChanInfoEnricher(MechsInfoEnricher): + """ + Class extends MechsInfoEnricher by the computation of the inline derivative. 
This hasn't been done in the + channel processing because it would cause a circular dependency through the coco checks used by the ModelParser + which we need to use. + """ + + def __init__(self, params): + super(MechsInfoEnricher, self).__init__(params) + + @classmethod + def enrich_mechanism_specific(cls, neuron, mechs_info): + mechs_info = cls.compute_expression_derivative(mechs_info) + return mechs_info + + @classmethod + def compute_expression_derivative(cls, chan_info): + for ion_channel_name, ion_channel_info in chan_info.items(): + inline_expression = chan_info[ion_channel_name]["root_expression"] + expr_str = str(inline_expression.get_expression()) + sympy_expr = sympy.parsing.sympy_parser.parse_expr(expr_str) + sympy_expr = sympy.diff(sympy_expr, "v_comp") + + ast_expression_d = ModelParser.parse_expression(str(sympy_expr)) + # copy scope of the original inline_expression into the derivative + ast_expression_d.update_scope(inline_expression.get_scope()) + ast_expression_d.accept(ASTSymbolTableVisitor()) + + chan_info[ion_channel_name]["inline_derivative"] = ast_expression_d + + return chan_info diff --git a/pynestml/utils/channel_processing.py b/pynestml/utils/channel_processing.py new file mode 100644 index 000000000..b35822b87 --- /dev/null +++ b/pynestml/utils/channel_processing.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# +# channel_processing.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.utils.mechanism_processing import MechanismProcessing + +import sympy +import re + + +class ChannelProcessing(MechanismProcessing): + """Extends MechanismProcessing. Searches for variables that, if zero, lead to the root expression always being zero, so + that the computation can be skipped during the simulation""" + + mechType = "channel" + + def __init__(self, params): + super(MechanismProcessing, self).__init__(params) + + @classmethod + def collect_information_for_specific_mech_types(cls, neuron, mechs_info): + mechs_info = cls.write_key_zero_parameters_for_root_inlines(mechs_info) + + return mechs_info + + @classmethod + def check_if_key_zero_var_for_expression(cls, rhs_expression_str, var_str): + """ + check if var being zero leads to the expression always being zero so that + the computation may be skipped if this is determined to be the case during simulation.
+ """ + if not re.search("1/.*", rhs_expression_str): + sympy_expression = sympy.parsing.sympy_parser.parse_expr(rhs_expression_str, evaluate=False) + if isinstance(sympy_expression, sympy.core.add.Add) \ + and cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[0]), var_str) \ + and cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[1]), var_str): + return True + + if isinstance(sympy_expression, sympy.core.mul.Mul) \ + and (cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[0]), var_str) + or cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[1]), var_str)): + return True + + if rhs_expression_str == var_str: + return True + + return False + + return False + + @classmethod + def search_for_key_zero_parameters_for_expression(cls, rhs_expression_str, parameters): + """ + Searching for parameters in the root-expression that if zero lead to the expression always being zero so that + the computation may be skipped. + """ + key_zero_parameters = list() + for parameter_name, parameter_info in parameters.items(): + if cls.check_if_key_zero_var_for_expression(rhs_expression_str, parameter_name): + key_zero_parameters.append(parameter_name) + + return key_zero_parameters + + @classmethod + def write_key_zero_parameters_for_root_inlines(cls, chan_info): + for channel_name, channel_info in chan_info.items(): + root_inline_rhs = cls._ode_toolbox_printer.print(channel_info["root_expression"].get_expression()) + chan_info[channel_name]["RootInlineKeyZeros"] = cls.search_for_key_zero_parameters_for_expression( + root_inline_rhs, channel_info["Parameters"]) + + return chan_info diff --git a/pynestml/utils/conc_info_enricher.py b/pynestml/utils/conc_info_enricher.py new file mode 100644 index 000000000..e4ed0507d --- /dev/null +++ b/pynestml/utils/conc_info_enricher.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# +# conc_info_enricher.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.utils.mechs_info_enricher import MechsInfoEnricher + + +class ConcInfoEnricher(MechsInfoEnricher): + """Just created for consistency. No more than the base-class enriching needs to be done""" + def __init__(self, params): + super(MechsInfoEnricher, self).__init__(params) diff --git a/pynestml/utils/concentration_processing.py b/pynestml/utils/concentration_processing.py new file mode 100644 index 000000000..b72a6600a --- /dev/null +++ b/pynestml/utils/concentration_processing.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# +# concentration_processing.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. 
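The key-zero search in ChannelProcessing (and the identical one in ConcentrationProcessing below) can be summarised independently of the AST machinery: a parameter is a "key zero" if setting it to 0 forces the whole right-hand side to 0, which lets the generated code skip that mechanism at run time. The sketch below expresses the same property via sympy substitution instead of the recursive Add/Mul walk used in check_if_key_zero_var_for_expression (which additionally skips any expression containing "1/" to stay clear of divisions); the expressions and parameter names are made up for illustration.

import sympy

def is_key_zero(rhs_expression_str, parameter_name):
    # a parameter is "key zero" if setting it to 0 forces the whole expression to 0
    expr = sympy.sympify(rhs_expression_str)
    return sympy.simplify(expr.subs(sympy.Symbol(parameter_name), 0)) == 0

print(is_key_zero("gbar_Na * m**3 * h * (e_Na - v_comp)", "gbar_Na"))  # True
print(is_key_zero("g_L * (e_L - v_comp) + i_stim", "g_L"))             # False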
+# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.utils.mechanism_processing import MechanismProcessing +from collections import defaultdict + +import sympy +import re + + +class ConcentrationProcessing(MechanismProcessing): + """The default Processing ignores the root expression when solving the odes which in case of the concentration + mechanism is a ode that needs to be solved. This is added here.""" + mechType = "concentration" + + def __init__(self, params): + super(MechanismProcessing, self).__init__(params) + + @classmethod + def collect_information_for_specific_mech_types(cls, neuron, mechs_info): + mechs_info = cls.ode_toolbox_processing_for_root_expression(neuron, mechs_info) + mechs_info = cls.write_key_zero_parameters_for_root_odes(mechs_info) + + return mechs_info + + @classmethod + def ode_toolbox_processing_for_root_expression(cls, neuron, conc_info): + """applies the ode_toolbox_processing to the root_expression since that was never appended to the list of ODEs + in the base processing and thereby also never went through the ode_toolbox processing""" + for concentration_name, concentration_info in conc_info.items(): + # Create fake mechs_info such that it can be processed by the existing ode_toolbox_processing function. + fake_conc_info = defaultdict() + fake_concentration_info = defaultdict() + fake_concentration_info["ODEs"] = list() + fake_concentration_info["ODEs"].append(concentration_info["root_expression"]) + fake_conc_info["fake"] = fake_concentration_info + + fake_conc_info = cls.ode_toolbox_processing(neuron, fake_conc_info) + + conc_info[concentration_name]["ODEs"] = conc_info[concentration_name]["ODEs"] | fake_conc_info["fake"][ + "ODEs"] + + return conc_info + + @classmethod + def check_if_key_zero_var_for_expression(cls, rhs_expression_str, var_str): + """ + check if var being zero leads to the expression always being zero so that + the computation may be skipped if this is determined to be the case during simulation. 
+ """ + if not re.search("1/.*", rhs_expression_str): + sympy_expression = sympy.parsing.sympy_parser.parse_expr(rhs_expression_str, evaluate=False) + if isinstance(sympy_expression, sympy.core.add.Add) \ + and cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[0]), var_str) \ + and cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[1]), var_str): + return True + + if isinstance(sympy_expression, sympy.core.mul.Mul) \ + and (cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[0]), var_str) + or cls.check_if_key_zero_var_for_expression(str(sympy_expression.args[1]), var_str)): + return True + + if rhs_expression_str == var_str: + return True + + return False + + return False + + @classmethod + def search_for_key_zero_parameters_for_expression(cls, rhs_expression_str, parameters): + key_zero_parameters = list() + for parameter_name, parameter_info in parameters.items(): + if cls.check_if_key_zero_var_for_expression(rhs_expression_str, parameter_name): + key_zero_parameters.append(parameter_name) + + return key_zero_parameters + + @classmethod + def write_key_zero_parameters_for_root_odes(cls, conc_info): + for concentration_name, concentration_info in conc_info.items(): + root_inline_rhs = cls._ode_toolbox_printer.print(concentration_info["root_expression"].get_rhs()) + conc_info[concentration_name]["RootOdeKeyZeros"] = cls.search_for_key_zero_parameters_for_expression( + root_inline_rhs, concentration_info["Parameters"]) + + return conc_info diff --git a/pynestml/utils/logger.py b/pynestml/utils/logger.py index 04eda97c1..89a1e8cf0 100644 --- a/pynestml/utils/logger.py +++ b/pynestml/utils/logger.py @@ -28,6 +28,8 @@ from pynestml.meta_model.ast_node import ASTNode from pynestml.utils.ast_source_location import ASTSourceLocation from pynestml.utils.messages import MessageCode +from pynestml.meta_model.ast_inline_expression import ASTInlineExpression +from pynestml.meta_model.ast_input_port import ASTInputPort class LoggingLevel(Enum): @@ -143,8 +145,15 @@ def log_message(cls, node: ASTNode = None, code: MessageCode = None, message: st if cls.no_print: return if cls.logging_level.value <= log_level.value: + if isinstance(node, ASTInlineExpression): + node_name = node.variable_name + elif node is None: + node_name = "unknown node" + else: + node_name = node.get_name() + to_print = '[' + str(cls.curr_message) + ',' - to_print = (to_print + (node.get_name() + ', ' if node is not None else + to_print = (to_print + (node_name + ', ' if node is not None else cls.current_node.get_name() + ', ' if cls.current_node is not None else 'GLOBAL, ')) to_print = to_print + str(log_level.name) to_print = to_print + (', ' + str(error_position) if error_position is not None else '') + ']: ' diff --git a/pynestml/utils/mechanism_processing.py b/pynestml/utils/mechanism_processing.py new file mode 100644 index 000000000..0860ef312 --- /dev/null +++ b/pynestml/utils/mechanism_processing.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +# +# mechanism_processing.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from collections import defaultdict +import copy + +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.utils.ast_mechanism_information_collector import ASTMechanismInformationCollector + +from pynestml.utils.ast_utils import ASTUtils +from pynestml.codegeneration.printers.nestml_printer import NESTMLPrinter + +from pynestml.codegeneration.printers.constant_printer import ConstantPrinter +from pynestml.codegeneration.printers.ode_toolbox_expression_printer import ODEToolboxExpressionPrinter +from pynestml.codegeneration.printers.ode_toolbox_function_call_printer import ODEToolboxFunctionCallPrinter +from pynestml.codegeneration.printers.ode_toolbox_variable_printer import ODEToolboxVariablePrinter +from pynestml.codegeneration.printers.unitless_cpp_simple_expression_printer import UnitlessCppSimpleExpressionPrinter +from odetoolbox import analysis + +from pynestml.meta_model.ast_expression import ASTExpression +from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression + + +class MechanismProcessing(object): + """Manages the collection of basic information necesary for all types of mechanisms and uses the + collect_information_for_specific_mech_types interface that needs to be implemented by the specific mechanism type + processing classes""" + + # used to keep track of whenever check_co_co was already called + # see inside check_co_co + first_time_run = defaultdict(lambda: defaultdict(lambda: True)) + # stores syns_info from the first call of check_co_co + mechs_info = defaultdict(lambda: defaultdict()) + + mechType = str() + + # ODE-toolbox printers + _constant_printer = ConstantPrinter() + _ode_toolbox_variable_printer = ODEToolboxVariablePrinter(None) + _ode_toolbox_function_call_printer = ODEToolboxFunctionCallPrinter(None) + _ode_toolbox_printer = ODEToolboxExpressionPrinter( + simple_expression_printer=UnitlessCppSimpleExpressionPrinter( + variable_printer=_ode_toolbox_variable_printer, + constant_printer=_constant_printer, + function_call_printer=_ode_toolbox_function_call_printer)) + + _ode_toolbox_variable_printer._expression_printer = _ode_toolbox_printer + _ode_toolbox_function_call_printer._expression_printer = _ode_toolbox_printer + + @classmethod + def prepare_equations_for_ode_toolbox(cls, neuron, mechs_info): + """Transforms the collected ode equations to the required input format of ode-toolbox and adds it to the + mechs_info dictionary""" + for mechanism_name, mechanism_info in mechs_info.items(): + mechanism_odes = defaultdict() + for ode in mechanism_info["ODEs"]: + nestml_printer = NESTMLPrinter() + ode_nestml_expression = nestml_printer.print_ode_equation(ode) + mechanism_odes[ode.lhs.name] = defaultdict() + mechanism_odes[ode.lhs.name]["ASTOdeEquation"] = ode + mechanism_odes[ode.lhs.name]["ODENestmlExpression"] = ode_nestml_expression + mechs_info[mechanism_name]["ODEs"] = mechanism_odes + + for mechanism_name, mechanism_info in mechs_info.items(): + for ode_variable_name, ode_info in mechanism_info["ODEs"].items(): + # Expression: + odetoolbox_indict = {"dynamics": []} + lhs = ASTUtils.to_ode_toolbox_name(ode_info["ASTOdeEquation"].get_lhs().get_complete_name()) + rhs = cls._ode_toolbox_printer.print(ode_info["ASTOdeEquation"].get_rhs()) + entry = {"expression": lhs + " = " + rhs, "initial_values": {}} + + # Initial values: + symbol_order = 
ode_info["ASTOdeEquation"].get_lhs().get_differential_order() + for order in range(symbol_order): + iv_symbol_name = ode_info["ASTOdeEquation"].get_lhs().get_name() + "'" * order + initial_value_expr = neuron.get_initial_value(iv_symbol_name) + entry["initial_values"][ + ASTUtils.to_ode_toolbox_name(iv_symbol_name)] = cls._ode_toolbox_printer.print( + initial_value_expr) + + odetoolbox_indict["dynamics"].append(entry) + mechs_info[mechanism_name]["ODEs"][ode_variable_name]["ode_toolbox_input"] = odetoolbox_indict + + return mechs_info + + @classmethod + def collect_raw_odetoolbox_output(cls, mechs_info): + """calls ode-toolbox for each ode individually and collects the raw output""" + for mechanism_name, mechanism_info in mechs_info.items(): + for ode_variable_name, ode_info in mechanism_info["ODEs"].items(): + solver_result = analysis(ode_info["ode_toolbox_input"], disable_stiffness_check=True) + mechs_info[mechanism_name]["ODEs"][ode_variable_name]["ode_toolbox_output"] = solver_result + + return mechs_info + + @classmethod + def ode_toolbox_processing(cls, neuron, mechs_info): + mechs_info = cls.prepare_equations_for_ode_toolbox(neuron, mechs_info) + mechs_info = cls.collect_raw_odetoolbox_output(mechs_info) + return mechs_info + + @classmethod + def collect_information_for_specific_mech_types(cls, neuron, mechs_info): + # to be implemented for specific mechanisms by child class (concentration, synapse, channel) + pass + + @classmethod + def determine_dependencies(cls, mechs_info): + for mechanism_name, mechanism_info in mechs_info.items(): + dependencies = list() + for inline in mechanism_info["SecondaryInlineExpressions"]: + if isinstance(inline.get_decorators(), list): + if "mechanism" in [e.namespace for e in inline.get_decorators()]: + dependencies.append(inline) + for ode in mechanism_info["ODEs"]: + if isinstance(ode.get_decorators(), list): + if "mechanism" in [e.namespace for e in ode.get_decorators()]: + dependencies.append(ode) + mechs_info[mechanism_name]["dependencies"] = dependencies + return mechs_info + + @classmethod + def get_mechs_info(cls, neuron: ASTNeuron): + """ + returns previously generated mechs_info + as a deep copy so it can't be changed externally + via object references + :param neuron: a single neuron instance. + :type neuron: ASTNeuron + """ + + return copy.deepcopy(cls.mechs_info[neuron][cls.mechType]) + + @classmethod + def check_co_co(cls, neuron: ASTNeuron): + """ + Checks if mechanism conditions apply for the handed over neuron. + :param neuron: a single neuron instance. 
+ :type neuron: ASTNeuron + """ + + # make sure we only run this a single time + # subsequent calls will be after AST has been transformed + # and there would be no kernels or inlines any more + if cls.first_time_run[neuron][cls.mechType]: + # collect root expressions and initialize collector + info_collector = ASTMechanismInformationCollector(neuron) + mechs_info = info_collector.detect_mechs(cls.mechType) + + # collect and process all basic mechanism information + mechs_info = info_collector.collect_mechanism_related_definitions(neuron, mechs_info) + mechs_info = info_collector.extend_variables_with_initialisations(neuron, mechs_info) + mechs_info = cls.ode_toolbox_processing(neuron, mechs_info) + + # collect and process all mechanism type specific information + mechs_info = cls.collect_information_for_specific_mech_types(neuron, mechs_info) + + cls.mechs_info[neuron][cls.mechType] = mechs_info + cls.first_time_run[neuron][cls.mechType] = False + + @classmethod + def print_element(cls, name, element, rec_step): + message = "" + for indent in range(rec_step): + message += "----" + message += name + ": " + if isinstance(element, defaultdict): + message += "\n" + message += cls.print_dictionary(element, rec_step + 1) + else: + if hasattr(element, 'name'): + message += element.name + elif isinstance(element, str): + message += element + elif isinstance(element, dict): + message += "\n" + message += cls.print_dictionary(element, rec_step + 1) + elif isinstance(element, list): + for index in range(len(element)): + message += "\n" + message += cls.print_element(str(index), element[index], rec_step + 1) + elif isinstance(element, ASTExpression) or isinstance(element, ASTSimpleExpression): + message += cls._ode_toolbox_printer.print(element) + + message += "(" + type(element).__name__ + ")" + return message + + @classmethod + def print_dictionary(cls, dictionary, rec_step): + """ + Print the mechanisms info dictionaries. + """ + message = "" + for name, element in dictionary.items(): + message += cls.print_element(name, element, rec_step) + message += "\n" + return message diff --git a/pynestml/utils/mechs_info_enricher.py b/pynestml/utils/mechs_info_enricher.py new file mode 100644 index 000000000..310a4c43e --- /dev/null +++ b/pynestml/utils/mechs_info_enricher.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +# +# mechs_info_enricher.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
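For orientation, this is roughly what the per-ODE input dictionary built by MechanismProcessing.prepare_equations_for_ode_toolbox looks like, and how collect_raw_odetoolbox_output invokes ode-toolbox on it. The concentration ODE, its parameters and initial value are invented for the example; in the real code the expression strings are produced by the ODE-toolbox printers and ASTUtils.to_ode_toolbox_name.

from odetoolbox import analysis

# hypothetical single-ODE input in the format assembled by prepare_equations_for_ode_toolbox
odetoolbox_indict = {
    "dynamics": [
        {
            "expression": "c_Ca' = (c_Ca_rest - c_Ca) / tau_Ca",
            "initial_values": {"c_Ca": "1e-4"},
        }
    ]
}

# as in collect_raw_odetoolbox_output: one call per ODE, stiffness check disabled
solver_result = analysis(odetoolbox_indict, disable_stiffness_check=True)
# solver_result is a list of solver dictionaries; for this linear ODE it contains an
# analytic solver with "initial_values", "update_expressions" and "propagators", which
# transform_ode_solutions later parses back into NESTML expressions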
+ +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.utils.model_parser import ModelParser +from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor +from pynestml.symbols.symbol import SymbolKind +from pynestml.visitors.ast_visitor import ASTVisitor +from pynestml.symbols.predefined_functions import PredefinedFunctions +from collections import defaultdict +from pynestml.utils.ast_utils import ASTUtils + + +class MechsInfoEnricher: + """ + Adds information collection that can't be done in the processing class since that is used in the cocos. + Here we use the ModelParser which would lead to a cyclic dependency. + """ + + def __init__(self): + pass + + @classmethod + def enrich_with_additional_info(cls, neuron: ASTNeuron, mechs_info: dict): + mechs_info = cls.transform_ode_solutions(neuron, mechs_info) + mechs_info = cls.enrich_mechanism_specific(neuron, mechs_info) + return mechs_info + + @classmethod + def transform_ode_solutions(cls, neuron, mechs_info): + for mechanism_name, mechanism_info in mechs_info.items(): + for ode_var_name, ode_info in mechanism_info["ODEs"].items(): + mechanism_info["ODEs"][ode_var_name]["transformed_solutions"] = list() + + for ode_solution_index in range(len(ode_info["ode_toolbox_output"])): + solution_transformed = defaultdict() + solution_transformed["states"] = defaultdict() + solution_transformed["propagators"] = defaultdict() + + for variable_name, rhs_str in ode_info["ode_toolbox_output"][ode_solution_index]["initial_values"].items(): + variable = neuron.get_equations_blocks()[0].get_scope().resolve_to_symbol(variable_name, + SymbolKind.VARIABLE) + + expression = ModelParser.parse_expression(rhs_str) + # pretend that update expressions are in "equations" block, + # which should always be present, as synapses have been + # defined to get here + expression.update_scope(neuron.get_equations_blocks()[0].get_scope()) + expression.accept(ASTSymbolTableVisitor()) + + update_expr_str = ode_info["ode_toolbox_output"][ode_solution_index]["update_expressions"][ + variable_name] + update_expr_ast = ModelParser.parse_expression( + update_expr_str) + # pretend that update expressions are in "equations" block, + # which should always be present, as differential equations + # must have been defined to get here + update_expr_ast.update_scope( + neuron.get_equations_blocks()[0].get_scope()) + update_expr_ast.accept(ASTSymbolTableVisitor()) + + solution_transformed["states"][variable_name] = { + "ASTVariable": variable, + "init_expression": expression, + "update_expression": update_expr_ast, + } + for variable_name, rhs_str in ode_info["ode_toolbox_output"][ode_solution_index]["propagators"].items(): + prop_variable = neuron.get_equations_blocks()[0].get_scope().resolve_to_symbol(variable_name, + SymbolKind.VARIABLE) + if prop_variable is None: + ASTUtils.add_declarations_to_internals( + neuron, ode_info["ode_toolbox_output"][ode_solution_index]["propagators"]) + prop_variable = neuron.get_equations_blocks()[0].get_scope().resolve_to_symbol( + variable_name, + SymbolKind.VARIABLE) + + expression = ModelParser.parse_expression(rhs_str) + # pretend that update expressions are in "equations" block, + # which should always be present, as synapses have been + # defined to get here + expression.update_scope( + neuron.get_equations_blocks()[0].get_scope()) + expression.accept(ASTSymbolTableVisitor()) + + solution_transformed["propagators"][variable_name] = { + "ASTVariable": prop_variable, "init_expression": expression, } + 
expression_variable_collector = ASTEnricherInfoCollectorVisitor() + expression.accept(expression_variable_collector) + + neuron_internal_declaration_collector = ASTEnricherInfoCollectorVisitor() + neuron.accept(neuron_internal_declaration_collector) + + for variable in expression_variable_collector.all_variables: + for internal_declaration in neuron_internal_declaration_collector.internal_declarations: + if variable.get_name() == internal_declaration.get_variables()[0].get_name() \ + and internal_declaration.get_expression().is_function_call() \ + and internal_declaration.get_expression().get_function_call().callee_name == \ + PredefinedFunctions.TIME_RESOLUTION: + mechanism_info["time_resolution_var"] = variable + + mechanism_info["ODEs"][ode_var_name]["transformed_solutions"].append(solution_transformed) + + return mechs_info + + @classmethod + def enrich_mechanism_specific(cls, neuron, mechs_info): + return mechs_info + + +class ASTEnricherInfoCollectorVisitor(ASTVisitor): + + def __init__(self): + super(ASTEnricherInfoCollectorVisitor, self).__init__() + self.inside_variable = False + self.inside_block_with_variables = False + self.all_states = list() + self.all_parameters = list() + self.inside_states_block = False + self.inside_parameters_block = False + self.all_variables = list() + self.inside_internals_block = False + self.inside_declaration = False + self.internal_declarations = list() + + def visit_block_with_variables(self, node): + self.inside_block_with_variables = True + if node.is_state: + self.inside_states_block = True + if node.is_parameters: + self.inside_parameters_block = True + if node.is_internals: + self.inside_internals_block = True + + def endvisit_block_with_variables(self, node): + self.inside_states_block = False + self.inside_parameters_block = False + self.inside_block_with_variables = False + self.inside_internals_block = False + + def visit_variable(self, node): + self.inside_variable = True + self.all_variables.append(node.clone()) + if self.inside_states_block: + self.all_states.append(node.clone()) + if self.inside_parameters_block: + self.all_parameters.append(node.clone()) + + def endvisit_variable(self, node): + self.inside_variable = False + + def visit_declaration(self, node): + self.inside_declaration = True + if self.inside_internals_block: + self.internal_declarations.append(node) + + def endvisit_declaration(self, node): + self.inside_declaration = False diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index 8decb356b..759a66070 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -21,6 +21,10 @@ from enum import Enum from typing import Tuple +from pynestml.meta_model.ast_inline_expression import ASTInlineExpression +from collections.abc import Iterable +from pynestml.meta_model.ast_function import ASTFunction + class MessageCode(Enum): """ @@ -118,6 +122,16 @@ class MessageCode(Enum): CREATING_INSTALL_PATH = 89 CREATING_TARGET_PATH = 90 ASSIGNING_TO_INLINE = 91 + CM_NO_GATING_VARIABLES = 100 + CM_FUNCTION_MISSING = 101 + CM_VARIABLES_NOT_DECLARED = 102 + CM_FUNCTION_BAD_NUMBER_ARGS = 103 + CM_FUNCTION_BAD_RETURN_TYPE = 104 + CM_VARIABLE_NAME_MULTI_USE = 105 + CM_NO_VALUE_ASSIGNMENT = 106 + SYNS_BAD_BUFFER_COUNT = 107 + CM_NO_V_COMP = 108 + MECHS_DICTIONARY_INFO = 109 class Messages: @@ -179,7 +193,8 @@ def get_binary_operation_not_defined(cls, lhs, operator, rhs): return MessageCode.OPERATION_NOT_DEFINED, message @classmethod - def get_binary_operation_type_could_not_be_derived(cls, lhs, operator, 
rhs, lhs_type, rhs_type): + def get_binary_operation_type_could_not_be_derived( + cls, lhs, operator, rhs, lhs_type, rhs_type): message = 'The type of the expression (left-hand side = \'%s\'; binary operator = \'%s\'; right-hand side = \'%s\') could not be derived: left-hand side has type \'%s\' whereas right-hand side has type \'%s\'!' % ( lhs, operator, rhs, lhs_type, rhs_type) return MessageCode.TYPE_MISMATCH, message @@ -196,7 +211,8 @@ def get_convolve_needs_buffer_parameter(cls): @classmethod def get_implicit_magnitude_conversion(cls, lhs, rhs, conversion_factor): - message = 'Implicit magnitude conversion from %s to %s with factor %s ' % (lhs.print_symbol(), rhs.print_symbol(), conversion_factor) + message = 'Implicit magnitude conversion from %s to %s with factor %s ' % ( + lhs.print_symbol(), rhs.print_symbol(), conversion_factor) return MessageCode.IMPLICIT_CAST, message @classmethod @@ -209,7 +225,13 @@ def get_start_building_symbol_table(cls): return MessageCode.START_SYMBOL_TABLE_BUILDING, 'Start building symbol table!' @classmethod - def get_function_call_implicit_cast(cls, arg_nr, function_call, expected_type, got_type, castable=False): + def get_function_call_implicit_cast( + cls, + arg_nr, + function_call, + expected_type, + got_type, + castable=False): """ Returns a message indicating that an implicit cast has been performed. :param arg_nr: the number of the argument which is cast @@ -261,11 +283,17 @@ def get_implicit_cast_rhs_to_lhs(cls, rhs_type, lhs_type): :return: a message :rtype:(MessageCode,str) """ - message = 'Implicit casting from (compatible) type \'%s\' to \'%s\'.' % (rhs_type, lhs_type) + message = 'Implicit casting from (compatible) type \'%s\' to \'%s\'.' % ( + rhs_type, lhs_type) return MessageCode.IMPLICIT_CAST, message @classmethod - def get_different_type_rhs_lhs(cls, rhs_expression, lhs_expression, rhs_type, lhs_type): + def get_different_type_rhs_lhs( + cls, + rhs_expression, + lhs_expression, + rhs_type, + lhs_type): """ Returns a message indicating that the type of the lhs does not correspond to the one of the rhs and can not be cast down to a common type. @@ -281,10 +309,7 @@ def get_different_type_rhs_lhs(cls, rhs_expression, lhs_expression, rhs_type, lh :rtype:(MessageCode,str) """ message = 'Type of lhs \'%s\' does not correspond to rhs \'%s\'! LHS: \'%s\', RHS: \'%s\'!' % ( - lhs_expression, - rhs_expression, - lhs_type.print_symbol(), - rhs_type.print_symbol()) + lhs_expression, rhs_expression, lhs_type.print_symbol(), rhs_type.print_symbol()) return MessageCode.CAST_NOT_POSSIBLE, message @classmethod @@ -300,7 +325,8 @@ def get_type_different_from_expected(cls, expected_type, got_type): """ from pynestml.symbols.type_symbol import TypeSymbol assert (expected_type is not None and isinstance(expected_type, TypeSymbol)), \ - '(PyNestML.Utils.Message) Not a type symbol provided (%s)!' % type(expected_type) + '(PyNestML.Utils.Message) Not a type symbol provided (%s)!' % type( + expected_type) assert (got_type is not None and isinstance(got_type, TypeSymbol)), \ '(PyNestML.Utils.Message) Not a type symbol provided (%s)!' % type(got_type) message = 'Actual type different from expected. Expected: \'%s\', got: \'%s\'!' % ( @@ -344,12 +370,14 @@ def get_input_port_type_not_defined(cls, input_port_name: str): :rtype: (MessageCode,str) """ assert (input_port_name is not None and isinstance(input_port_name, str)), \ - '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(input_port_name) + '(PyNestML.Utils.Message) Not a string provided (%s)!' 
% type( + input_port_name) message = 'No type declared for spiking input port \'%s\'!' % input_port_name return MessageCode.SPIKE_INPUT_PORT_TYPE_NOT_DEFINED, message @classmethod - def get_model_contains_errors(cls, model_name: str) -> Tuple[MessageCode, str]: + def get_model_contains_errors( + cls, model_name: str) -> Tuple[MessageCode, str]: """ Returns a message indicating that a model contains errors thus no code is generated. :param model_name: the name of the model @@ -361,7 +389,8 @@ def get_model_contains_errors(cls, model_name: str) -> Tuple[MessageCode, str]: return MessageCode.MODEL_CONTAINS_ERRORS, message @classmethod - def get_start_processing_model(cls, model_name: str) -> Tuple[MessageCode, str]: + def get_start_processing_model( + cls, model_name: str) -> Tuple[MessageCode, str]: """ Returns a message indicating that the processing of a model is started. :param model_name: the name of the model @@ -387,7 +416,8 @@ def get_code_generated(cls, model_name, path): '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(model_name) assert (path is not None and isinstance(path, str)), \ '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(path) - message = 'Successfully generated code for the model: \'' + model_name + '\' in: \'' + path + '\' !' + message = 'Successfully generated code for the model: \'' + \ + model_name + '\' in: \'' + path + '\' !' return MessageCode.CODE_SUCCESSFULLY_GENERATED, message @classmethod @@ -475,7 +505,8 @@ def get_first_arg_not_kernel_or_equation(cls, func_name): return MessageCode.ARG_NOT_KERNEL_OR_EQUATION, message @classmethod - def get_second_arg_not_a_spike_port(cls, func_name: str) -> Tuple[MessageCode, str]: + def get_second_arg_not_a_spike_port( + cls, func_name: str) -> Tuple[MessageCode, str]: """ Indicates that the second argument of the NESTML convolve() call is not a spiking input port. :param func_name: the name of the function @@ -527,7 +558,8 @@ def get_continuous_input_port_specified(cls, name, keyword): """ assert (name is not None and isinstance(name, str)), \ '(PyNestML.Utils.Message) Not a string provided (%s)!' % name - message = 'Continuous time input port \'%s\' specified with type keywords (%s)!' % (name, keyword) + message = 'Continuous time input port \'%s\' specified with type keywords (%s)!' % ( + name, keyword) return MessageCode.CONTINUOUS_INPUT_PORT_WITH_QUALIFIERS, message @classmethod @@ -724,7 +756,8 @@ def get_compilation_unit_name_collision(cls, name, art1, art2): '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(art1) assert (art2 is not None and isinstance(art2, str)), \ '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(art2) - message = 'Name collision of \'%s\' in \'%s\' and \'%s\'!' % (name, art1, art2) + message = 'Name collision of \'%s\' in \'%s\' and \'%s\'!' % ( + name, art1, art2) return MessageCode.NAME_COLLISION, message @classmethod @@ -812,7 +845,8 @@ def get_vector_in_non_vector(cls, vector, non_vector): '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(vector) assert (non_vector is not None and isinstance(non_vector, list)), \ '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(non_vector) - message = 'Vector value \'%s\' used in a non-vector declaration of variables \'%s\'!' % (vector, non_vector) + message = 'Vector value \'%s\' used in a non-vector declaration of variables \'%s\'!' 
% ( + vector, non_vector) return MessageCode.VECTOR_IN_NON_VECTOR, message @classmethod @@ -990,22 +1024,29 @@ def get_not_neuroscience_unit_used(cls, name): return MessageCode.NOT_NEUROSCIENCE_UNIT, message @classmethod - def get_ode_needs_consistent_units(cls, name, differential_order, lhs_type, rhs_type): + def get_ode_needs_consistent_units( + cls, + name, + differential_order, + lhs_type, + rhs_type): assert (name is not None and isinstance(name, str)), \ '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(name) message = 'ODE definition for \'' if differential_order > 1: - message += 'd^' + str(differential_order) + ' ' + name + ' / dt^' + str(differential_order) + '\'' + message += 'd^' + str(differential_order) + ' ' + \ + name + ' / dt^' + str(differential_order) + '\'' if differential_order > 0: message += 'd ' + name + ' / dt\'' else: message += '\'' + str(name) + '\'' - message += ' has inconsistent units: expected \'' + lhs_type.print_symbol() + '\', got \'' + \ - rhs_type.print_symbol() + '\'' + message += ' has inconsistent units: expected \'' + \ + lhs_type.print_symbol() + '\', got \'' + rhs_type.print_symbol() + '\'' return MessageCode.ODE_NEEDS_CONSISTENT_UNITS, message @classmethod - def get_ode_function_needs_consistent_units(cls, name, declared_type, expression_type): + def get_ode_function_needs_consistent_units( + cls, name, declared_type, expression_type): assert (name is not None and isinstance(name, str)), \ '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(name) message = 'ODE function definition for \'' + name + '\' has inconsistent units: expected \'' + \ @@ -1063,7 +1104,8 @@ def templated_arg_types_inconsistent(cls, function_name, failing_arg_idx, other_ """ message = 'In function \'' + function_name + '\': actual derived type of templated parameter ' + \ str(failing_arg_idx + 1) + ' is \'' + failing_arg_type_str + '\', which is inconsistent with that of parameter(s) ' + \ - ', '.join([str(_ + 1) for _ in other_args_idx]) + ', which has/have type \'' + other_type_str + '\'' + ', '.join([str(_ + 1) for _ in other_args_idx]) + \ + ', which has/have type \'' + other_type_str + '\'' return MessageCode.TEMPLATED_ARG_TYPES_INCONSISTENT, message @classmethod @@ -1116,7 +1158,11 @@ def get_emit_spike_function_but_no_output_port(cls): return MessageCode.EMIT_SPIKE_FUNCTION_BUT_NO_OUTPUT_PORT, message @classmethod - def get_kernel_wrong_type(cls, kernel_name: str, differential_order: int, actual_type: str) -> Tuple[MessageCode, str]: + def get_kernel_wrong_type(cls, + kernel_name: str, + differential_order: int, + actual_type: str) -> Tuple[MessageCode, + str]: """ Returns a message indicating that the type of a kernel is wrong. :param kernel_name: the name of the kernel @@ -1135,14 +1181,19 @@ def get_kernel_wrong_type(cls, kernel_name: str, differential_order: int, actual return MessageCode.KERNEL_WRONG_TYPE, message @classmethod - def get_kernel_iv_wrong_type(cls, iv_name: str, actual_type: str, expected_type: str) -> Tuple[MessageCode, str]: + def get_kernel_iv_wrong_type(cls, + iv_name: str, + actual_type: str, + expected_type: str) -> Tuple[MessageCode, + str]: """ Returns a message indicating that the type of a kernel initial value is wrong. :param iv_name: the name of the state variable with an initial value :param actual_type: the name of the actual type that was found in the model :param expected_type: the name of the type that was expected """ - message = 'Initial value \'%s\' was found to be of type \'%s\' (should be %s)!' 
% (iv_name, actual_type, expected_type)
+        message = 'Initial value \'%s\' was found to be of type \'%s\' (should be %s)!' % (
+            iv_name, actual_type, expected_type)
         return MessageCode.KERNEL_IV_WRONG_TYPE, message

@@ -1163,7 +1214,8 @@ def get_equations_defined_but_integrate_odes_not_called(cls):
     @classmethod
     def get_template_root_path_created(cls, templates_root_dir: str):
         message = "Given template root path is not an absolute path. " \
-                  "Creating the absolute path with default templates directory '" + templates_root_dir + "'"
+                  "Creating the absolute path with default templates directory '" + \
+                  templates_root_dir + "'"
         return MessageCode.TEMPLATE_ROOT_PATH_CREATED, message

     @classmethod
@@ -1199,6 +1251,106 @@ def get_function_is_delay_variable(cls, func):
         message = "Function '" + func + "' is not a function but a delay variable."
         return MessageCode.DELAY_VARIABLE, message

+    @classmethod
+    def get_no_gating_variables(
+            cls,
+            cm_inline_expr: ASTInlineExpression,
+            ion_channel_name: str):
+        """
+        Indicates that an inline expression was defined inside the equations block
+        that uses no kernels / has no convolution calls,
+        so there must be at least one variable name that ends with _{x}.
+        For example, an inline "Na" must have at least one variable ending with "_Na".
+        :return: a message
+        :rtype: (MessageCode,str)
+        """
+
+        message = "No gating variables found inside declaration of '" + \
+            cm_inline_expr.variable_name + "', "
+        message += "\nmeaning no variable ends with the suffix '_" + \
+            ion_channel_name + "' here. "
+        message += "This suffix indicates that a variable is a gating variable. "
+        message += "At least one gating variable is expected to exist."
+
+        return MessageCode.CM_NO_GATING_VARIABLES, message
+
+    @classmethod
+    def get_cm_inline_expression_variable_used_mulitple_times(
+            cls,
+            cm_inline_expr: ASTInlineExpression,
+            bad_variable_name: str,
+            ion_channel_name: str):
+        message = "Variable name '" + bad_variable_name + \
+            "' seems to be used multiple times"
+        message += " inside inline expression '" + cm_inline_expr.variable_name + "'. "
+        message += "\nVariables are not allowed to occur multiple times here."
+
+        return MessageCode.CM_VARIABLE_NAME_MULTI_USE, message
+
+    @classmethod
+    def get_expected_cm_function_missing(
+            cls,
+            ion_channel_name: str,
+            variable_name: str,
+            function_name: str):
+        message = "Implementation of a function called '" + function_name + "' not found. "
+        message += "It is expected because of variable '" + \
+            variable_name + "' in the ion channel '" + ion_channel_name + "'"
+        return MessageCode.CM_FUNCTION_MISSING, message
+
+    @classmethod
+    def get_expected_cm_function_wrong_args_count(
+            cls, ion_channel_name: str, variable_name, astfun: ASTFunction):
+        message = "Function '" + astfun.name + \
+            "' is expected to have exactly one argument. "
+        message += "It is related to variable '" + variable_name + \
+            "' in the ion channel '" + ion_channel_name + "'"
+        return MessageCode.CM_FUNCTION_BAD_NUMBER_ARGS, message
+
+    @classmethod
+    def get_expected_cm_function_bad_return_type(
+            cls, ion_channel_name: str, astfun: ASTFunction):
+        message = "'" + ion_channel_name + "' channel function '" + \
+            astfun.name + "' must return real. "
+        return MessageCode.CM_FUNCTION_BAD_RETURN_TYPE, message
+
+    @classmethod
+    def get_expected_cm_variables_missing_in_blocks(
+            cls,
+            missing_variable_to_proper_block: Iterable,
+            expected_variables_to_reason: dict):
+        message = "The following variables were not found:\n"
+        for missing_var, proper_location in missing_variable_to_proper_block.items():
+            message += "Variable with name '" + missing_var
+            message += "' not found, but expected to exist inside of " + \
+                proper_location + " because of position "
+            message += str(
+                expected_variables_to_reason[missing_var].get_source_position()) + "\n"
+        return MessageCode.CM_VARIABLES_NOT_DECLARED, message
+
+    @classmethod
+    def get_cm_variable_value_missing(cls, varname: str):
+        message = "The following variable has no value assigned: " + varname + "\n"
+        return MessageCode.CM_NO_VALUE_ASSIGNMENT, message
+
+    @classmethod
+    def get_v_comp_variable_value_missing(
+            cls, neuron_name: str, missing_variable_name):
+        message = "Missing state variable '" + missing_variable_name
+        message += "' inside of neuron '" + neuron_name + "'. "
+        message += "You have passed the NEST_COMPARTMENTAL flag to the generator, thereby activating compartmental mode. "
+        message += "In this mode, such a variable must be declared in the state block.\n"
+        message += "This variable represents the dynamically calculated value of the membrane potential "
+        message += "and should be used in your equations for voltage-activated ion channels."
+        return MessageCode.CM_NO_V_COMP, message
+
+    @classmethod
+    def get_syns_bad_buffer_count(cls, buffers: set, synapse_name: str):
+        message = "Synapse \'%s\' uses the following input buffers: %s" % (
+            synapse_name, buffers)
+        message += " However, exactly one spike input buffer per synapse is allowed."
+        return MessageCode.SYNS_BAD_BUFFER_COUNT, message
+
     @classmethod
     def get_nest_delay_decorator_not_found(cls):
         message = "To generate code for NEST Simulator, at least one parameter in the model should be decorated with the ``@nest::delay`` keyword."
         return MessageCode.NEST_DELAY_DECORATOR_NOT_FOUND, message
@@ -1233,3 +1385,12 @@ def get_creating_target_path(cls, target_path: str):
     def get_creating_install_path(cls, install_path: str):
         message = "Creating installation directory: '" + install_path + "'"
         return MessageCode.CREATING_INSTALL_PATH, message
+
+    @classmethod
+    def get_mechs_dictionary_info(cls, chan_info, syns_info, conc_info):
+        message = ""
+        message += "chan_info:\n" + chan_info + "\n"
+        message += "syns_info:\n" + syns_info + "\n"
+        message += "conc_info:\n" + conc_info + "\n"
+
+        return MessageCode.MECHS_DICTIONARY_INFO, message
diff --git a/pynestml/utils/synapse_processing.py b/pynestml/utils/synapse_processing.py
new file mode 100644
index 000000000..6e77e936b
--- /dev/null
+++ b/pynestml/utils/synapse_processing.py
@@ -0,0 +1,231 @@
+# -*- coding: utf-8 -*-
+#
+# synapse_processing.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST. If not, see .
+ +from pynestml.utils.mechanism_processing import MechanismProcessing +from pynestml.utils.ast_synapse_information_collector import ASTSynapseInformationCollector + +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.meta_model.ast_block_with_variables import ASTBlockWithVariables + +from pynestml.frontend.frontend_configuration import FrontendConfiguration +from pynestml.utils.ast_utils import ASTUtils +from pynestml.symbols.symbol import SymbolKind +from pynestml.utils.logger import Logger, LoggingLevel +from pynestml.utils.messages import Messages + +from odetoolbox import analysis +from collections import defaultdict +import copy + + +class SynapseProcessing(MechanismProcessing): + mechType = "receptor" + + def __init__(self, params): + super(MechanismProcessing, self).__init__(params) + + @classmethod + def collect_information_for_specific_mech_types(cls, neuron, mechs_info): + mechs_info, add_info_collector = cls.collect_additional_base_infos(neuron, mechs_info) + if len(mechs_info) > 0: + # only do this if any synapses found + # otherwise tests may fail + mechs_info = cls.collect_and_check_inputs_per_synapse(mechs_info) + + mechs_info = cls.convolution_ode_toolbox_processing(neuron, mechs_info) + + return mechs_info + + @classmethod + def collect_additional_base_infos(cls, neuron, syns_info): + """ + Collect internals, kernels, inputs and convolutions associated with the synapse. + """ + info_collector = ASTSynapseInformationCollector() + neuron.accept(info_collector) + for synapse_name, synapse_info in syns_info.items(): + synapse_inline = syns_info[synapse_name]["root_expression"] + syns_info[synapse_name][ + "internals_used_declared"] = info_collector.get_synapse_specific_internal_declarations(synapse_inline) + syns_info[synapse_name]["total_used_declared"] = info_collector.get_variable_names_of_synapse( + synapse_inline) + syns_info[synapse_name]["convolutions"] = defaultdict() + + kernel_arg_pairs = info_collector.get_extracted_kernel_args( + synapse_inline) + for kernel_var, spikes_var in kernel_arg_pairs: + kernel_name = kernel_var.get_name() + spikes_name = spikes_var.get_name() + convolution_name = info_collector.construct_kernel_X_spike_buf_name( + kernel_name, spikes_name, 0) + syns_info[synapse_name]["convolutions"][convolution_name] = { + "kernel": { + "name": kernel_name, + "ASTKernel": info_collector.get_kernel_by_name(kernel_name), + }, + "spikes": { + "name": spikes_name, + "ASTInputPort": info_collector.get_input_port_by_name(spikes_name), + }, + } + return syns_info, info_collector + + @classmethod + def collect_and_check_inputs_per_synapse( + cls, + syns_info: dict): + new_syns_info = copy.copy(syns_info) + + # collect all buffers used + for synapse_name, synapse_info in syns_info.items(): + new_syns_info[synapse_name]["buffers_used"] = set() + for convolution_name, convolution_info in synapse_info["convolutions"].items( + ): + input_name = convolution_info["spikes"]["name"] + new_syns_info[synapse_name]["buffers_used"].add(input_name) + + # now make sure each synapse is using exactly one buffer + for synapse_name, synapse_info in syns_info.items(): + buffers = new_syns_info[synapse_name]["buffers_used"] + if len(buffers) != 1: + code, message = Messages.get_syns_bad_buffer_count( + buffers, synapse_name) + causing_object = synapse_info["inline_expression"] + Logger.log_message( + code=code, + message=message, + error_position=causing_object.get_source_position(), + log_level=LoggingLevel.ERROR, + node=causing_object) + + return new_syns_info + 
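+    # After the collection steps above, each entry of syns_info has roughly the
+    # following shape (names such as "AMPA" are purely illustrative; the
+    # "analytic_solution" entries are only added by convolution_ode_toolbox_processing()
+    # below):
+    #
+    #   syns_info["AMPA"] = {
+    #       "root_expression": <ASTInlineExpression of the synapse inline>,
+    #       "internals_used_declared": {...},
+    #       "total_used_declared": [...],
+    #       "buffers_used": {"spikes_AMPA"},
+    #       "convolutions": {
+    #           "g_AMPA__X__spikes_AMPA": {
+    #               "kernel": {"name": "g_AMPA", "ASTKernel": <ASTKernel>},
+    #               "spikes": {"name": "spikes_AMPA", "ASTInputPort": <ASTInputPort>},
+    #               "analytic_solution": <ode-toolbox output>,
+    #           },
+    #       },
+    #   }
+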
+ @classmethod + def convolution_ode_toolbox_processing(cls, neuron, syns_info): + if not neuron.get_parameters_blocks(): + return syns_info + + parameters_block = neuron.get_parameters_blocks()[0] + + for synapse_name, synapse_info in syns_info.items(): + for convolution_name, convolution_info in synapse_info["convolutions"].items(): + kernel_buffer = (convolution_info["kernel"]["ASTKernel"], convolution_info["spikes"]["ASTInputPort"]) + convolution_solution = cls.ode_solve_convolution(neuron, parameters_block, kernel_buffer) + syns_info[synapse_name]["convolutions"][convolution_name]["analytic_solution"] = convolution_solution + return syns_info + + @classmethod + def ode_solve_convolution(cls, + neuron: ASTNeuron, + parameters_block: ASTBlockWithVariables, + kernel_buffer): + odetoolbox_indict = cls.create_ode_indict( + neuron, parameters_block, kernel_buffer) + full_solver_result = analysis( + odetoolbox_indict, + disable_stiffness_check=True, + log_level=FrontendConfiguration.logging_level) + analytic_solver = None + analytic_solvers = [ + x for x in full_solver_result if x["solver"] == "analytical"] + assert len( + analytic_solvers) <= 1, "More than one analytic solver not presently supported" + if len(analytic_solvers) > 0: + analytic_solver = analytic_solvers[0] + + return analytic_solver + + @classmethod + def create_ode_indict(cls, + neuron: ASTNeuron, + parameters_block: ASTBlockWithVariables, + kernel_buffer): + kernel_buffers = {tuple(kernel_buffer)} + odetoolbox_indict = cls.transform_ode_and_kernels_to_json( + neuron, parameters_block, kernel_buffers) + odetoolbox_indict["options"] = {} + odetoolbox_indict["options"]["output_timestep_symbol"] = "__h" + return odetoolbox_indict + + @classmethod + def transform_ode_and_kernels_to_json( + cls, + neuron: ASTNeuron, + parameters_block, + kernel_buffers): + """ + Converts AST node to a JSON representation suitable for passing to ode-toolbox. + + Each kernel has to be generated for each spike buffer convolve in which it occurs, e.g. if the NESTML model code contains the statements + + convolve(G, ex_spikes) + convolve(G, in_spikes) + + then `kernel_buffers` will contain the pairs `(G, ex_spikes)` and `(G, in_spikes)`, from which two ODEs will be generated, with dynamical state (variable) names `G__X__ex_spikes` and `G__X__in_spikes`. + + :param parameters_block: ASTBlockWithVariables + :return: Dict + """ + odetoolbox_indict = {"dynamics": []} + + equations_block = neuron.get_equations_blocks()[0] + + for kernel, spike_input_port in kernel_buffers: + if ASTUtils.is_delta_kernel(kernel): + continue + # delta function -- skip passing this to ode-toolbox + + for kernel_var in kernel.get_variables(): + expr = ASTUtils.get_expr_from_kernel_var( + kernel, kernel_var.get_complete_name()) + kernel_order = kernel_var.get_differential_order() + kernel_X_spike_buf_name_ticks = ASTUtils.construct_kernel_X_spike_buf_name( + kernel_var.get_name(), spike_input_port.get_name(), kernel_order, diff_order_symbol="'") + + ASTUtils.replace_rhs_variables(expr, kernel_buffers) + + entry = {"expression": kernel_X_spike_buf_name_ticks + " = " + str(expr), "initial_values": {}} + + # initial values need to be declared for order 1 up to kernel + # order (e.g. none for kernel function f(t) = ...; 1 for kernel + # ODE f'(t) = ...; 2 for f''(t) = ... 
and so on) + for order in range(kernel_order): + iv_sym_name_ode_toolbox = ASTUtils.construct_kernel_X_spike_buf_name( + kernel_var.get_name(), spike_input_port, order, diff_order_symbol="'") + symbol_name_ = kernel_var.get_name() + "'" * order + symbol = equations_block.get_scope().resolve_to_symbol( + symbol_name_, SymbolKind.VARIABLE) + assert symbol is not None, "Could not find initial value for variable " + symbol_name_ + initial_value_expr = symbol.get_declaring_expression() + assert initial_value_expr is not None, "No initial value found for variable name " + symbol_name_ + entry["initial_values"][iv_sym_name_ode_toolbox] = cls._ode_toolbox_printer.print( + initial_value_expr) + + odetoolbox_indict["dynamics"].append(entry) + + odetoolbox_indict["parameters"] = {} + if parameters_block is not None: + for decl in parameters_block.get_declarations(): + for var in decl.variables: + odetoolbox_indict["parameters"][var.get_complete_name( + )] = cls._ode_toolbox_printer.print(decl.get_expression()) + + return odetoolbox_indict diff --git a/pynestml/utils/syns_info_enricher.py b/pynestml/utils/syns_info_enricher.py new file mode 100644 index 000000000..5d54da19c --- /dev/null +++ b/pynestml/utils/syns_info_enricher.py @@ -0,0 +1,348 @@ +# -*- coding: utf-8 -*- +# +# syns_info_enricher.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
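+# The enricher below reorganizes the raw ode-toolbox output that synapse_processing.py
+# attaches to every convolution ("initial_values", "update_expressions" and
+# "propagators", keyed by generated variable names) into "kernel_states" and
+# "propagators" entries, pairing each resolved symbol ("ASTVariable") with parsed
+# "init_expression" / "update_expression" ASTs scoped to the equations block.
+# It also identifies analytic helper variables such as __h and restores the
+# user-defined ordering of internals.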
+ +from _collections import defaultdict +import copy + +from pynestml.meta_model.ast_expression import ASTExpression +from pynestml.meta_model.ast_inline_expression import ASTInlineExpression +from pynestml.meta_model.ast_neuron import ASTNeuron +from pynestml.symbols.predefined_functions import PredefinedFunctions +from pynestml.symbols.symbol import SymbolKind +from pynestml.utils.model_parser import ModelParser +from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor +from pynestml.visitors.ast_visitor import ASTVisitor +import sympy + +from pynestml.utils.mechs_info_enricher import MechsInfoEnricher + + +class SynsInfoEnricher(MechsInfoEnricher): + """ + input: a neuron after ODE-toolbox transformations + + the kernel analysis solves all kernels at the same time + this splits the variables on per kernel basis + """ + + def __init__(self, params): + super(MechsInfoEnricher, self).__init__(params) + + @classmethod + def enrich_mechanism_specific(cls, neuron, mechs_info): + specific_enricher_visitor = SynsInfoEnricherVisitor() + neuron.accept(specific_enricher_visitor) + mechs_info = cls.transform_convolutions_analytic_solutions(neuron, mechs_info) + mechs_info = cls.restore_order_internals(neuron, mechs_info) + return mechs_info + + @classmethod + def transform_convolutions_analytic_solutions( + cls, + neuron: ASTNeuron, + cm_syns_info: dict): + + enriched_syns_info = copy.copy(cm_syns_info) + for synapse_name, synapse_info in cm_syns_info.items(): + for convolution_name in synapse_info["convolutions"].keys(): + analytic_solution = enriched_syns_info[synapse_name][ + "convolutions"][convolution_name]["analytic_solution"] + analytic_solution_transformed = defaultdict( + lambda: defaultdict()) + + for variable_name, expression_str in analytic_solution["initial_values"].items(): + variable = neuron.get_equations_blocks()[0].get_scope().resolve_to_symbol(variable_name, + SymbolKind.VARIABLE) + + expression = ModelParser.parse_expression(expression_str) + # pretend that update expressions are in "equations" block, + # which should always be present, as synapses have been + # defined to get here + expression.update_scope(neuron.get_equations_blocks()[0].get_scope()) + expression.accept(ASTSymbolTableVisitor()) + + update_expr_str = analytic_solution["update_expressions"][variable_name] + update_expr_ast = ModelParser.parse_expression( + update_expr_str) + # pretend that update expressions are in "equations" block, + # which should always be present, as differential equations + # must have been defined to get here + update_expr_ast.update_scope( + neuron.get_equations_blocks()[0].get_scope()) + update_expr_ast.accept(ASTSymbolTableVisitor()) + + analytic_solution_transformed['kernel_states'][variable_name] = { + "ASTVariable": variable, + "init_expression": expression, + "update_expression": update_expr_ast, + } + + for variable_name, expression_string in analytic_solution["propagators"].items( + ): + variable = SynsInfoEnricherVisitor.internal_variable_name_to_variable[variable_name] + expression = ModelParser.parse_expression( + expression_string) + # pretend that update expressions are in "equations" block, + # which should always be present, as synapses have been + # defined to get here + expression.update_scope( + neuron.get_equations_blocks()[0].get_scope()) + expression.accept(ASTSymbolTableVisitor()) + analytic_solution_transformed['propagators'][variable_name] = { + "ASTVariable": variable, "init_expression": expression, } + + 
enriched_syns_info[synapse_name]["convolutions"][convolution_name]["analytic_solution"] = \ + analytic_solution_transformed + + # only one buffer allowed, so allow direct access + # to it instead of a list + if "buffer_name" not in enriched_syns_info[synapse_name]: + buffers_used = list( + enriched_syns_info[synapse_name]["buffers_used"]) + del enriched_syns_info[synapse_name]["buffers_used"] + enriched_syns_info[synapse_name]["buffer_name"] = buffers_used[0] + + inline_expression_name = enriched_syns_info[synapse_name]["root_expression"].variable_name + enriched_syns_info[synapse_name]["root_expression"] = \ + SynsInfoEnricherVisitor.inline_name_to_transformed_inline[inline_expression_name] + enriched_syns_info[synapse_name]["inline_expression_d"] = \ + cls.compute_expression_derivative( + enriched_syns_info[synapse_name]["root_expression"]) + + # now also identify analytic helper variables such as __h + enriched_syns_info[synapse_name]["analytic_helpers"] = cls.get_analytic_helper_variable_declarations( + enriched_syns_info[synapse_name]) + + return enriched_syns_info + + @classmethod + def restore_order_internals(cls, neuron: ASTNeuron, cm_syns_info: dict): + """orders user defined internals + back to the order they were originally defined + this is important if one such variable uses another + user needs to have control over the order + assign each variable a rank + that corresponds to the order in + SynsInfoEnricher.declarations_ordered""" + variable_name_to_order = {} + for index, declaration in enumerate( + SynsInfoEnricherVisitor.declarations_ordered): + variable_name = declaration.get_variables()[0].get_name() + variable_name_to_order[variable_name] = index + + enriched_syns_info = copy.copy(cm_syns_info) + for synapse_name, synapse_info in cm_syns_info.items(): + user_internals = enriched_syns_info[synapse_name]["internals_used_declared"] + user_internals_sorted = sorted( + user_internals.items(), key=lambda x: variable_name_to_order[x[0]]) + enriched_syns_info[synapse_name]["internals_used_declared"] = user_internals_sorted + + return enriched_syns_info + + @classmethod + def compute_expression_derivative( + cls, inline_expression: ASTInlineExpression) -> ASTExpression: + expr_str = str(inline_expression.get_expression()) + sympy_expr = sympy.parsing.sympy_parser.parse_expr(expr_str) + sympy_expr = sympy.diff(sympy_expr, "v_comp") + + ast_expression_d = ModelParser.parse_expression(str(sympy_expr)) + # copy scope of the original inline_expression into the the derivative + ast_expression_d.update_scope(inline_expression.get_scope()) + ast_expression_d.accept(ASTSymbolTableVisitor()) + + return ast_expression_d + + @classmethod + def get_variable_names_used(cls, node) -> set: + variable_names_extractor = ASTUsedVariableNamesExtractor(node) + return variable_names_extractor.variable_names + + @classmethod + def get_all_synapse_variables(cls, single_synapse_info): + """returns all variable names referenced by the synapse inline + and by the analytical solution + assumes that the model has already been transformed""" + + # get all variables from transformed inline + inline_variables = cls.get_variable_names_used( + single_synapse_info["root_expression"]) + + analytic_solution_vars = set() + # get all variables from transformed analytic solution + for convolution_name, convolution_info in single_synapse_info["convolutions"].items( + ): + analytic_sol = convolution_info["analytic_solution"] + # get variables from init and update expressions + # for each kernel + for kernel_var_name, 
kernel_info in analytic_sol["kernel_states"].items(
+            ):
+                analytic_solution_vars.add(kernel_var_name)
+
+                update_vars = cls.get_variable_names_used(
+                    kernel_info["update_expression"])
+                init_vars = cls.get_variable_names_used(
+                    kernel_info["init_expression"])
+
+                analytic_solution_vars.update(update_vars)
+                analytic_solution_vars.update(init_vars)
+
+            # get variables from init expressions
+            # for each propagator
+            # include propagator variable itself
+            for propagator_var_name, propagator_info in analytic_sol["propagators"].items(
+            ):
+                analytic_solution_vars.add(propagator_var_name)
+
+                init_vars = cls.get_variable_names_used(
+                    propagator_info["init_expression"])
+
+                analytic_solution_vars.update(init_vars)
+
+        return analytic_solution_vars.union(inline_variables)
+
+    @classmethod
+    def get_new_variables_after_transformation(cls, single_synapse_info):
+        return cls.get_all_synapse_variables(single_synapse_info).difference(
+            single_synapse_info["total_used_declared"])
+
+    @classmethod
+    def get_analytic_helper_variable_names(cls, single_synapse_info):
+        """get new variables that only occur on the right-hand side of analytic solution expressions,
+        but for which the analytic solution does not provide any values.
+        This can isolate additional variables that suddenly appear, such as __h,
+        whose initial values are not included in the output of the analytic solver"""
+
+        analytic_lhs_vars = set()
+
+        for convolution_name, convolution_info in single_synapse_info["convolutions"].items(
+        ):
+            analytic_sol = convolution_info["analytic_solution"]
+
+            # get variables representing convolutions by kernel
+            for kernel_var_name, kernel_info in analytic_sol["kernel_states"].items(
+            ):
+                analytic_lhs_vars.add(kernel_var_name)
+
+            # get propagator variable names
+            for propagator_var_name, propagator_info in analytic_sol["propagators"].items(
+            ):
+                analytic_lhs_vars.add(propagator_var_name)
+
+        return cls.get_new_variables_after_transformation(
+            single_synapse_info).symmetric_difference(analytic_lhs_vars)
+
+    @classmethod
+    def get_analytic_helper_variable_declarations(cls, single_synapse_info):
+        variable_names = cls.get_analytic_helper_variable_names(
+            single_synapse_info)
+        result = dict()
+        for variable_name in variable_names:
+            if variable_name not in SynsInfoEnricherVisitor.internal_variable_name_to_variable:
+                continue
+            variable = SynsInfoEnricherVisitor.internal_variable_name_to_variable[variable_name]
+            expression = SynsInfoEnricherVisitor.variables_to_internal_declarations[variable]
+            result[variable_name] = {
+                "ASTVariable": variable,
+                "init_expression": expression,
+            }
+            if expression.is_function_call() and expression.get_function_call(
+            ).callee_name == PredefinedFunctions.TIME_RESOLUTION:
+                result[variable_name]["is_time_resolution"] = True
+            else:
+                result[variable_name]["is_time_resolution"] = False
+
+        return result
+
+
+class SynsInfoEnricherVisitor(ASTVisitor):
+    variables_to_internal_declarations = {}
+    internal_variable_name_to_variable = {}
+    inline_name_to_transformed_inline = {}
+
+    # assuming depth-first traversal,
+    # collect declarations in the order
+    # in which they were present in the neuron
+    declarations_ordered = []
+
+    def __init__(self):
+        super(SynsInfoEnricherVisitor, self).__init__()
+
+        self.inside_parameter_block = False
+        self.inside_state_block = False
+        self.inside_internals_block = False
+        self.inside_inline_expression = False
+        self.inside_declaration = False
+        self.inside_simple_expression = False
+
+    def 
visit_inline_expression(self, node): + self.inside_inline_expression = True + inline_name = node.variable_name + SynsInfoEnricherVisitor.inline_name_to_transformed_inline[inline_name] = node + + def endvisit_inline_expression(self, node): + self.inside_inline_expression = False + + def visit_block_with_variables(self, node): + if node.is_state: + self.inside_state_block = True + if node.is_parameters: + self.inside_parameter_block = True + if node.is_internals: + self.inside_internals_block = True + + def endvisit_block_with_variables(self, node): + if node.is_state: + self.inside_state_block = False + if node.is_parameters: + self.inside_parameter_block = False + if node.is_internals: + self.inside_internals_block = False + + def visit_simple_expression(self, node): + self.inside_simple_expression = True + + def endvisit_simple_expression(self, node): + self.inside_simple_expression = False + + def visit_declaration(self, node): + self.declarations_ordered.append(node) + self.inside_declaration = True + if self.inside_internals_block: + variable = node.get_variables()[0] + expression = node.get_expression() + SynsInfoEnricherVisitor.variables_to_internal_declarations[variable] = expression + SynsInfoEnricherVisitor.internal_variable_name_to_variable[variable.get_name( + )] = variable + + def endvisit_declaration(self, node): + self.inside_declaration = False + + +class ASTUsedVariableNamesExtractor(ASTVisitor): + def __init__(self, node): + super(ASTUsedVariableNamesExtractor, self).__init__() + self.variable_names = set() + node.accept(self) + + def visit_variable(self, node): + self.variable_names.add(node.get_name()) diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index 8d7e35329..cb6ba102e 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -284,9 +284,15 @@ def visitInlineExpression(self, ctx): variable_name = (str(ctx.variableName.text) if ctx.variableName is not None else None) data_type = (self.visit(ctx.dataType()) if ctx.dataType() is not None else None) expression = (self.visit(ctx.expression()) if ctx.expression() is not None else None) + + decorators = [] + for kw in ctx.anyDecorator(): + decorators.append(self.visit(kw)) + inlineExpr = ASTNodeFactory.create_ast_inline_expression(is_recordable=is_recordable, variable_name=variable_name, data_type=data_type, expression=expression, - source_position=create_source_pos(ctx)) + source_position=create_source_pos(ctx), + decorators=decorators) update_node_comments(inlineExpr, self.__comments.visit(ctx)) return inlineExpr @@ -294,7 +300,13 @@ def visitInlineExpression(self, ctx): def visitOdeEquation(self, ctx): lhs = self.visit(ctx.lhs) if ctx.lhs is not None else None rhs = self.visit(ctx.rhs) if ctx.rhs is not None else None - ode_equation = ASTNodeFactory.create_ast_ode_equation(lhs=lhs, rhs=rhs, source_position=create_source_pos(ctx)) + + decorators = [] + for kw in ctx.anyDecorator(): + decorators.append(self.visit(kw)) + + ode_equation = ASTNodeFactory.create_ast_ode_equation(lhs=lhs, rhs=rhs, source_position=create_source_pos(ctx), + decorators=decorators) update_node_comments(ode_equation, self.__comments.visit(ctx)) return ode_equation @@ -488,10 +500,10 @@ def visitNeuron(self, ctx): return neuron def visitNamespaceDecoratorNamespace(self, ctx): - return ctx.NAME() + return str(ctx.NAME()) def visitNamespaceDecoratorName(self, ctx): - return ctx.NAME() + return str(ctx.NAME()) def visitAnyDecorator(self, ctx): from 
pynestml.generated.PyNestMLLexer import PyNestMLLexer diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index 5dcd7b2a9..d02b54588 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -63,7 +63,8 @@ def visit_neuron(self, node): code, message = Messages.get_start_building_symbol_table() Logger.log_message(node=node, code=code, error_position=node.get_source_position(), message=message, log_level=LoggingLevel.DEBUG) - scope = Scope(scope_type=ScopeType.GLOBAL, source_position=node.get_source_position()) + scope = Scope(scope_type=ScopeType.GLOBAL, + source_position=node.get_source_position()) node.update_scope(scope) node.get_body().update_scope(scope) # now first, we add all predefined elements to the scope @@ -79,7 +80,8 @@ def visit_neuron(self, node): def endvisit_neuron(self, node): # before following checks occur, we need to ensure several simple properties - CoCosManager.post_symbol_table_builder_checks(node, after_ast_rewrite=self.after_ast_rewrite_) + CoCosManager.post_symbol_table_builder_checks( + node, after_ast_rewrite=self.after_ast_rewrite_) # update the equations for equation_block in node.get_equations_blocks(): @@ -110,7 +112,8 @@ def visit_synapse(self, node): message=message, log_level=LoggingLevel.DEBUG) # before starting the work on the synapse, make everything which was implicit explicit # but if we have a model without an equations block, just skip this step - scope = Scope(scope_type=ScopeType.GLOBAL, source_position=node.get_source_position()) + scope = Scope(scope_type=ScopeType.GLOBAL, + source_position=node.get_source_position()) node.update_scope(scope) node.get_body().update_scope(scope) @@ -149,7 +152,8 @@ def visit_function(self, node): :param node: a function block object. 
:type node: ast_function """ - self.block_type_stack.push(BlockType.LOCAL) # before entering, update the current node type + self.block_type_stack.push( + BlockType.LOCAL) # before entering, update the current node type symbol = FunctionSymbol(scope=node.get_scope(), element_reference=node, param_types=list(), name=node.get_name(), is_predefined=False, return_type=None) # put it on the stack for the endvisit method @@ -195,7 +199,8 @@ def endvisit_function(self, node): if node.has_return_type(): data_type_visitor = ASTDataTypeVisitor() node.get_return_type().accept(data_type_visitor) - symbol.set_return_type(PredefinedTypes.get_type(data_type_visitor.result)) + symbol.set_return_type( + PredefinedTypes.get_type(data_type_visitor.result)) else: symbol.set_return_type(PredefinedTypes.get_void_type()) self.block_type_stack.pop() # before leaving update the type @@ -308,14 +313,16 @@ def visit_declaration(self, node: ASTDeclaration) -> None: # all declarations in the state block are recordable is_recordable = (node.is_recordable or self.block_type_stack.top() == BlockType.STATE) - init_value = node.get_expression() if self.block_type_stack.top() == BlockType.STATE else None + init_value = node.get_expression( + ) if self.block_type_stack.top() == BlockType.STATE else None # split the decorators in the AST up into namespace decorators and other decorators decorators = [] namespace_decorators = {} for d in node.get_decorators(): if isinstance(d, ASTNamespaceDecorator): - namespace_decorators[str(d.get_namespace())] = str(d.get_name()) + namespace_decorators[str(d.get_namespace())] = str( + d.get_name()) else: decorators.append(d) @@ -502,6 +509,17 @@ def visit_inline_expression(self, node): :param node: a single inline expression. :type node: ASTInlineExpression """ + + # split the decorators in the AST up into namespace decorators and other decorators + decorators = [] + namespace_decorators = {} + for d in node.get_decorators(): + if isinstance(d, ASTNamespaceDecorator): + namespace_decorators[str(d.get_namespace())] = str( + d.get_name()) + else: + decorators.append(d) + data_type_visitor = ASTDataTypeVisitor() node.get_data_type().accept(data_type_visitor) type_symbol = PredefinedTypes.get_type(data_type_visitor.result) diff --git a/requirements.txt b/requirements.txt index 4c4f0120a..125c525c5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ numpy >= 1.8.2 scipy sympy >= 1.1.1, <1.11 -antlr4-python3-runtime == 4.10 +antlr4-python3-runtime == 4.13 setuptools Jinja2 >= 2.10 typing;python_version<"3.5" diff --git a/setup.py b/setup.py index d5ec0360d..f37f17a3f 100755 --- a/setup.py +++ b/setup.py @@ -53,6 +53,9 @@ "codegeneration/resources_nest/point_neuron/common/*.jinja2", "codegeneration/resources_nest/point_neuron/directives_cpp/*.jinja2", "codegeneration/resources_nest/point_neuron/setup/*.jinja2", + "codegeneration/resources_nest_compartmental/cm_neuron/*.jinja2", + "codegeneration/resources_nest_compartmental/cm_neuron/directives/*.jinja2", + "codegeneration/resources_nest_compartmental/cm_neuron/setup/*.jinja2", "codegeneration/resources_python_standalone/point_neuron/*.jinja2", "codegeneration/resources_python_standalone/point_neuron/directives_py/*.jinja2", "codegeneration/resources_spinnaker/*.jinja2", diff --git a/tests/cocos_test.py b/tests/cocos_test.py index 53f1daf7f..45a6d9083 100644 --- a/tests/cocos_test.py +++ b/tests/cocos_test.py @@ -38,7 +38,12 @@ class CoCosTest(unittest.TestCase): def setUp(self): Logger.init_logger(LoggingLevel.INFO) - 
SymbolTable.initialize_symbol_table(ASTSourceLocation(start_line=0, start_column=0, end_line=0, end_column=0)) + SymbolTable.initialize_symbol_table( + ASTSourceLocation( + start_line=0, + start_column=0, + end_line=0, + end_column=0)) PredefinedUnits.register_units() PredefinedTypes.register_types() PredefinedVariables.register_variables() @@ -46,108 +51,158 @@ def setUp(self): def test_invalid_element_defined_after_usage(self): model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoVariableDefinedAfterUsage.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoVariableDefinedAfterUsage.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_element_defined_after_usage(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVariableDefinedAfterUsage.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVariableDefinedAfterUsage.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_element_in_same_line(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoElementInSameLine.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoElementInSameLine.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_element_in_same_line(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoElementInSameLine.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoElementInSameLine.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_integrate_odes_called_if_equations_defined(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def 
test_valid_integrate_odes_called_if_equations_defined(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_element_not_defined_in_scope(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoVariableNotDefined.nestml')) - self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)), 4) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoVariableNotDefined.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 4) def test_valid_element_not_defined_in_scope(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVariableNotDefined.nestml')) - self.assertEqual( - len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), - 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVariableNotDefined.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_variable_with_same_name_as_unit(self): Logger.set_logging_level(LoggingLevel.NO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVariableWithSameNameAsUnit.nestml')) - self.assertEqual( - len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.WARNING)), - 3) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVariableWithSameNameAsUnit.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.WARNING)), 3) def test_invalid_variable_redeclaration(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoVariableRedeclared.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoVariableRedeclared.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_variable_redeclaration(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVariableRedeclared.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], 
LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVariableRedeclared.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_each_block_unique(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoEachBlockUnique.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoEachBlockUnique.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) def test_valid_each_block_unique(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoEachBlockUnique.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoEachBlockUnique.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_function_unique_and_defined(self): Logger.set_logging_level(LoggingLevel.INFO) @@ -160,40 +215,60 @@ def test_invalid_function_unique_and_defined(self): def test_valid_function_unique_and_defined(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoFunctionNotUnique.nestml')) - self.assertEqual( - len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoFunctionNotUnique.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_inline_expressions_have_rhs(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoInlineExpressionHasNoRhs.nestml')) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoInlineExpressionHasNoRhs.nestml')) assert model is None def test_valid_inline_expressions_have_rhs(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoInlineExpressionHasNoRhs.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoInlineExpressionHasNoRhs.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_inline_expression_has_several_lhs(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 
'invalid')), - 'CoCoInlineExpressionWithSeveralLhs.nestml')) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoInlineExpressionWithSeveralLhs.nestml')) assert model is None def test_valid_inline_expression_has_several_lhs(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoInlineExpressionWithSeveralLhs.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoInlineExpressionWithSeveralLhs.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_no_values_assigned_to_input_ports(self): Logger.set_logging_level(LoggingLevel.INFO) @@ -206,104 +281,158 @@ def test_invalid_no_values_assigned_to_input_ports(self): def test_valid_no_values_assigned_to_input_ports(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoValueAssignedToInputPort.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoValueAssignedToInputPort.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_order_of_equations_correct(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoNoOrderOfEquations.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoNoOrderOfEquations.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) def test_valid_order_of_equations_correct(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoNoOrderOfEquations.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoNoOrderOfEquations.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_numerator_of_unit_one(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoUnitNumeratorNotOne.nestml')) - self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoUnitNumeratorNotOne.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + 
model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) def test_valid_numerator_of_unit_one(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoUnitNumeratorNotOne.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoUnitNumeratorNotOne.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_names_of_neurons_unique(self): Logger.init_logger(LoggingLevel.INFO) ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoMultipleNeuronsWithEqualName.nestml')) - self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoMultipleNeuronsWithEqualName.nestml')) + self.assertEqual( + len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)), 1) def test_valid_names_of_neurons_unique(self): Logger.init_logger(LoggingLevel.INFO) ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoMultipleNeuronsWithEqualName.nestml')) - self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoMultipleNeuronsWithEqualName.nestml')) + self.assertEqual( + len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)), 0) def test_invalid_no_nest_collision(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoNestNamespaceCollision.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoNestNamespaceCollision.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_no_nest_collision(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoNestNamespaceCollision.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoNestNamespaceCollision.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_redundant_input_port_keywords_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoInputPortWithRedundantTypes.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + 
os.path.dirname(__file__), + 'invalid')), + 'CoCoInputPortWithRedundantTypes.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_redundant_input_port_keywords_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoInputPortWithRedundantTypes.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoInputPortWithRedundantTypes.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_parameters_assigned_only_in_parameters_block(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoParameterAssignedOutsideBlock.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoParameterAssignedOutsideBlock.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_parameters_assigned_only_in_parameters_block(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoParameterAssignedOutsideBlock.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoParameterAssignedOutsideBlock.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_inline_expressions_assigned_only_in_declaration(self): Logger.set_logging_level(LoggingLevel.INFO) @@ -332,253 +461,377 @@ def test_valid_internals_assigned_only_in_internals_block(self): def test_invalid_function_with_wrong_arg_number_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_function_with_wrong_arg_number_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + 
os.path.dirname(__file__), + 'valid')), + 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_init_values_have_rhs_and_ode(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoInitValuesWithoutOde.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.WARNING)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoInitValuesWithoutOde.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.WARNING)), 2) def test_valid_init_values_have_rhs_and_ode(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoInitValuesWithoutOde.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.WARNING)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoInitValuesWithoutOde.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.WARNING)), 2) def test_invalid_incorrect_return_stmt_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoIncorrectReturnStatement.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 4) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoIncorrectReturnStatement.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 4) def test_valid_incorrect_return_stmt_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoIncorrectReturnStatement.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoIncorrectReturnStatement.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_ode_vars_outside_init_block_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoOdeVarNotInInitialValues.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoOdeVarNotInInitialValues.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_ode_vars_outside_init_block_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = 
ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoOdeVarNotInInitialValues.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoOdeVarNotInInitialValues.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_convolve_correctly_defined(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoConvolveNotCorrectlyProvided.nestml')) - self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)), 3) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoConvolveNotCorrectlyProvided.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 3) def test_valid_convolve_correctly_defined(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoConvolveNotCorrectlyProvided.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoConvolveNotCorrectlyProvided.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_vector_in_non_vector_declaration_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoVectorInNonVectorDeclaration.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoVectorInNonVectorDeclaration.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_vector_in_non_vector_declaration_detected(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVectorInNonVectorDeclaration.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVectorInNonVectorDeclaration.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_vector_parameter_declaration(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoVectorParameterDeclaration.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + 
os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoVectorParameterDeclaration.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_vector_parameter_declaration(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVectorParameterDeclaration.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVectorParameterDeclaration.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_vector_parameter_type(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoVectorParameterType.nestml')) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoVectorParameterType.nestml')) self.assertEqual(len( Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 3) def test_valid_vector_parameter_type(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVectorParameterType.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVectorParameterType.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_vector_parameter_size(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoVectorDeclarationSize.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoVectorDeclarationSize.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) def test_valid_vector_parameter_size(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoVectorDeclarationSize.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoVectorDeclarationSize.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_convolve_correctly_parameterized(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoConvolveNotCorrectlyParametrized.nestml')) - self.assertEqual(len( - 
Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoConvolveNotCorrectlyParametrized.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) def test_valid_convolve_correctly_parameterized(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoConvolveNotCorrectlyParametrized.nestml')) - self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoConvolveNotCorrectlyParametrized.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_invariant_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoInvariantNotBool.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoInvariantNotBool.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_valid_invariant_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoInvariantNotBool.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoInvariantNotBool.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_expression_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoIllegalExpression.nestml')) - self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)), 6) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoIllegalExpression.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 6) def test_valid_expression_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoIllegalExpression.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoIllegalExpression.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def 
test_invalid_compound_expression_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 5) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 5) def test_valid_compound_expression_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_ode_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoOdeIncorrectlyTyped.nestml')) - self.assertTrue(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)) > 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoOdeIncorrectlyTyped.nestml')) + self.assertTrue(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)) > 0) def test_valid_ode_correctly_typed(self): Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoOdeCorrectlyTyped.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoOdeCorrectlyTyped.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_output_block_defined_if_emit_call(self): """test that an error is raised when the emit_spike() function is called by the neuron, but an output block is not defined""" Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoOutputPortDefinedIfEmitCall.nestml')) - self.assertTrue(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)) > 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoOutputPortDefinedIfEmitCall.nestml')) + self.assertTrue(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)) > 0) def test_invalid_output_port_defined_if_emit_call(self): """test that an error is raised when the emit_spike() function is called by the neuron, but a spiking output port 
is not defined""" Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoOutputPortDefinedIfEmitCall-2.nestml')) - self.assertTrue(len(Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], - LoggingLevel.ERROR)) > 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoOutputPortDefinedIfEmitCall-2.nestml')) + self.assertTrue(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)) > 0) def test_valid_output_port_defined_if_emit_call(self): """test that no error is raised when the output block is missing, but not emit_spike() functions are called""" Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoOutputPortDefinedIfEmitCall.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoOutputPortDefinedIfEmitCall.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_valid_coco_kernel_type(self): """ @@ -586,10 +839,14 @@ def test_valid_coco_kernel_type(self): """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoKernelType.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoKernelType.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_coco_kernel_type(self): """ @@ -597,10 +854,14 @@ def test_invalid_coco_kernel_type(self): """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoKernelType.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoKernelType.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 1) def test_invalid_coco_kernel_type_initial_values(self): """ @@ -608,10 +869,14 @@ def test_invalid_coco_kernel_type_initial_values(self): """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoKernelTypeInitialValues.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 4) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoKernelTypeInitialValues.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 4) def test_valid_coco_state_variables_initialized(self): """ @@ -619,10 +884,14 @@ def 
test_valid_coco_state_variables_initialized(self): """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoStateVariablesInitialized.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoStateVariablesInitialized.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_coco_state_variables_initialized(self): """ @@ -630,50 +899,70 @@ def test_invalid_coco_state_variables_initialized(self): """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoStateVariablesInitialized.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoStateVariablesInitialized.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) def test_invalid_co_co_priorities_correctly_specified(self): """ """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoPrioritiesCorrectlySpecified.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_synapse_list()[0], LoggingLevel.ERROR)), 1) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoPrioritiesCorrectlySpecified.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_synapse_list()[0], LoggingLevel.ERROR)), 1) def test_valid_co_co_priorities_correctly_specified(self): """ """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoPrioritiesCorrectlySpecified.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_synapse_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoPrioritiesCorrectlySpecified.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_synapse_list()[0], LoggingLevel.ERROR)), 0) def test_invalid_co_co_resolution_legally_used(self): """ """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), - 'CoCoResolutionLegallyUsed.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_synapse_list()[0], LoggingLevel.ERROR)), 2) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'invalid')), + 'CoCoResolutionLegallyUsed.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_synapse_list()[0], LoggingLevel.ERROR)), 2) def test_valid_co_co_resolution_legally_used(self): """ """ Logger.set_logging_level(LoggingLevel.INFO) model = ModelParser.parse_model( - 
os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), - 'CoCoResolutionLegallyUsed.nestml')) - self.assertEqual(len( - Logger.get_all_messages_of_level_and_or_node(model.get_synapse_list()[0], LoggingLevel.ERROR)), 0) + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), + 'valid')), + 'CoCoResolutionLegallyUsed.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_synapse_list()[0], LoggingLevel.ERROR)), 0) def test_valid_co_co_vector_input_port(self): Logger.set_logging_level(LoggingLevel.INFO) diff --git a/tests/invalid/stdp_synapse_missing_delay_decorator.nestml b/tests/invalid/stdp_synapse_missing_delay_decorator.nestml new file mode 100644 index 000000000..9c9b0caf8 --- /dev/null +++ b/tests/invalid/stdp_synapse_missing_delay_decorator.nestml @@ -0,0 +1,79 @@ +""" +stdp - Synapse model for spike-timing dependent plasticity +######################################################### + +Description ++++++++++++ + +stdp_synapse is a synapse with spike-timing dependent plasticity (as defined in [1]_). Here the weight dependence exponent can be set separately for potentiation and depression. Examples: + +=================== ==== ============================= +Multiplicative STDP [2]_ mu_plus = mu_minus = 1 +Additive STDP [3]_ mu_plus = mu_minus = 0 +Guetig STDP [1]_ mu_plus, mu_minus in [0, 1] +Van Rossum STDP [4]_ mu_plus = 0 mu_minus = 1 +=================== ==== ============================= + + +References +++++++++++ + +.. [1] Guetig et al. (2003) Learning Input Correlations through Nonlinear + Temporally Asymmetric Hebbian Plasticity. Journal of Neuroscience + +.. [2] Rubin, J., Lee, D. and Sompolinsky, H. (2001). Equilibrium + properties of temporally asymmetric Hebbian plasticity, PRL + 86,364-367 + +.. [3] Song, S., Miller, K. D. and Abbott, L. F. (2000). Competitive + Hebbian learning through spike-timing-dependent synaptic + plasticity,Nature Neuroscience 3:9,919--926 + +.. [4] van Rossum, M. C. W., Bi, G-Q and Turrigiano, G. G. (2000). + Stable Hebbian learning from spike timing-dependent + plasticity, Journal of Neuroscience, 20:23,8812--8821 +""" +synapse stdp: + state: + w real = 1. @nest::weight # Synaptic weight + pre_trace real = 0. + post_trace real = 0. + + parameters: + d ms = 1 ms # Synaptic transmission delay + lambda real = .01 + tau_tr_pre ms = 20 ms + tau_tr_post ms = 20 ms + alpha real = 1 + mu_plus real = 1 + mu_minus real = 1 + Wmax real = 100. + Wmin real = 0. + + equations: + pre_trace' = -pre_trace / tau_tr_pre + post_trace' = -post_trace / tau_tr_post + + input: + pre_spikes <- spike + post_spikes <- spike + + output: + spike + + onReceive(post_spikes): + post_trace += 1 + + # potentiate synapse + w_ real = Wmax * ( w / Wmax + (lambda * ( 1. - ( w / Wmax ) )**mu_plus * pre_trace )) + w = min(Wmax, w_) + + onReceive(pre_spikes): + pre_trace += 1 + + # depress synapse + w_ real = Wmax * ( w / Wmax - ( alpha * lambda * ( w / Wmax )**mu_minus * post_trace )) + w = max(Wmin, w_) + + # deliver spike to postsynaptic partner + deliver_spike(w, d) diff --git a/tests/nest_compartmental_tests/cocos_test.py b/tests/nest_compartmental_tests/cocos_test.py new file mode 100644 index 000000000..37266db7a --- /dev/null +++ b/tests/nest_compartmental_tests/cocos_test.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# +# cocos_test.py +# +# This file is part of NEST. 
+# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from __future__ import print_function + +import os +import unittest +from pynestml.frontend.frontend_configuration import FrontendConfiguration + +from pynestml.utils.ast_source_location import ASTSourceLocation +from pynestml.symbol_table.symbol_table import SymbolTable +from pynestml.symbols.predefined_functions import PredefinedFunctions +from pynestml.symbols.predefined_types import PredefinedTypes +from pynestml.symbols.predefined_units import PredefinedUnits +from pynestml.symbols.predefined_variables import PredefinedVariables +from pynestml.utils.logger import LoggingLevel, Logger +from pynestml.utils.model_parser import ModelParser + + +class CoCosTest(unittest.TestCase): + + def setUp(self): + Logger.init_logger(LoggingLevel.INFO) + SymbolTable.initialize_symbol_table( + ASTSourceLocation( + start_line=0, + start_column=0, + end_line=0, + end_column=0)) + PredefinedUnits.register_units() + PredefinedTypes.register_types() + PredefinedVariables.register_variables() + PredefinedFunctions.register_functions() + FrontendConfiguration.target_platform = "NEST_COMPARTMENTAL" + + def test_invalid_cm_variables_declared(self): + model = ModelParser.parse_model( + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), 'resources', + 'invalid')), + 'CoCoCmVariablesDeclared.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 4) + + def test_valid_cm_variables_declared(self): + Logger.set_logging_level(LoggingLevel.INFO) + model = ModelParser.parse_model( + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), 'resources', + 'valid')), + 'CoCoCmVariablesDeclared.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + + # it is currently not enforced for the non-cm parameter block, but cm + # needs that + def test_invalid_cm_variable_has_rhs(self): + model = ModelParser.parse_model( + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), 'resources', + 'invalid')), + 'CoCoCmVariableHasRhs.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 2) + + def test_valid_cm_variable_has_rhs(self): + Logger.set_logging_level(LoggingLevel.INFO) + model = ModelParser.parse_model( + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), 'resources', + 'valid')), + 'CoCoCmVariableHasRhs.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) + + # it is currently not enforced for the non-cm parameter block, but cm + # needs that + def test_invalid_cm_v_comp_exists(self): + model = ModelParser.parse_model( + os.path.join( + os.path.realpath( + os.path.join( + 
os.path.dirname(__file__), 'resources', + 'invalid')), + 'CoCoCmVcompExists.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 4) + + def test_valid_cm_v_comp_exists(self): + Logger.set_logging_level(LoggingLevel.INFO) + model = ModelParser.parse_model( + os.path.join( + os.path.realpath( + os.path.join( + os.path.dirname(__file__), 'resources', + 'valid')), + 'CoCoCmVcompExists.nestml')) + self.assertEqual(len(Logger.get_all_messages_of_level_and_or_node( + model.get_neuron_list()[0], LoggingLevel.ERROR)), 0) diff --git a/tests/nest_compartmental_tests/compartmental_model_test.py b/tests/nest_compartmental_tests/compartmental_model_test.py new file mode 100644 index 000000000..619c9a7bb --- /dev/null +++ b/tests/nest_compartmental_tests/compartmental_model_test.py @@ -0,0 +1,522 @@ +# -*- coding: utf-8 -*- +# +# compartmental_model_test.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +import numpy as np +import os +import pytest +import unittest + +import nest + +from pynestml.codegeneration.nest_tools import NESTTools +from pynestml.frontend.pynestml_frontend import generate_nest_compartmental_target + +# set to `True` to plot simulation traces +TEST_PLOTS = True +try: + import matplotlib + import matplotlib.pyplot as plt +except BaseException as e: + # always set TEST_PLOTS to False if matplotlib can not be imported + TEST_PLOTS = False + +dt = .001 + +soma_params = { + # passive parameters + 'C_m': 89.245535, # pF + 'g_C': 0.0, # soma has no parent + 'g_L': 8.924572508, # nS + 'e_L': -75.0, + # E-type specific + 'gbar_Na': 4608.698576715, # nS + 'e_Na': 60., + 'gbar_K': 956.112772900, # nS + 'e_K': -90. +} +dend_params_passive = { + # passive parameters + 'C_m': 1.929929, + 'g_C': 1.255439494, + 'g_L': 0.192992878, + 'e_L': -75.0, + # by default, active conducances are set to zero, so we don't need to specify + # them explicitely +} +dend_params_active = { + # passive parameters + 'C_m': 1.929929, # pF + 'g_C': 1.255439494, # nS + 'g_L': 0.192992878, # nS + 'e_L': -70.0, # mV + # E-type specific + 'gbar_Na': 17.203212493, # nS + 'e_Na': 60., # mV + 'gbar_K': 11.887347450, # nS + 'e_K': -90. 
# mV +} + + +class CMTest(unittest.TestCase): + + def reset_nest(self): + nest.ResetKernel() + nest.SetKernelStatus(dict(resolution=dt)) + + def install_nestml_model(self): + tests_path = os.path.realpath(os.path.dirname(__file__)) + input_path = os.path.join( + tests_path, + "resources", + "cm_default.nestml" + ) + target_path = os.path.join( + tests_path, + "target/" + ) + + if not os.path.exists(target_path): + os.makedirs(target_path) + + print( + f"Compiled nestml model 'cm_main_cm_default_nestml' not found, installing in:" + f" {target_path}" + ) + + generate_nest_compartmental_target( + input_path=input_path, + target_path="/tmp/nestml-component/", + module_name="cm_defaultmodule", + suffix="_nestml", + logging_level="DEBUG" + ) + + def get_model(self, reinstall_flag=True): + if self.nestml_flag: + # Currently, we have no way of checking whether the *.so-file + # associated with the model is in {nest build directory}/lib/nest, + # so we only check the reinstall flag, which should be set to True + # unless the testcase is being debugged + if reinstall_flag: + self.install_nestml_model() + + print("Instantiating NESTML compartmental model") + + nest.Install("cm_defaultmodule") + + cm_act = nest.Create("cm_default_nestml") + cm_pas = nest.Create("cm_default_nestml") + else: + print("Instantiating NEST compartmental model") + # default model built into NEST Simulator + cm_pas = nest.Create('cm_default') + cm_act = nest.Create('cm_default') + + return cm_act, cm_pas + + def get_rec_list(self): + if self.nestml_flag: + return [ + 'v_comp0', 'v_comp1', + 'm_Na0', 'h_Na0', 'n_K0', 'm_Na1', 'h_Na1', 'n_K1', + 'g_AN_AMPA1', 'g_AN_NMDA1' + ] + else: + return [ + 'v_comp0', 'v_comp1', + 'm_Na_0', 'h_Na_0', 'n_K_0', 'm_Na_1', 'h_Na_1', 'n_K_1', + 'g_r_AN_AMPA_1', 'g_d_AN_AMPA_1', 'g_r_AN_NMDA_1', 'g_d_AN_NMDA_1' + ] + + def run_model(self): + self.reset_nest() + cm_act, cm_pas = self.get_model() + + # create a neuron model with a passive dendritic compartment + cm_pas.compartments = [ + {"parent_idx": -1, "params": soma_params}, + {"parent_idx": 0, "params": dend_params_passive} + ] + + # create a neuron model with an active dendritic compartment + cm_act.compartments = [ + {"parent_idx": -1, "params": soma_params}, + {"parent_idx": 0, "params": dend_params_active} + ] + + # set spike thresholds + cm_pas.V_th = -50. + cm_act.V_th = -50. 
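# As a side note (a sketch, not part of this test): the `compartments` property
# describes the morphology as a flat list in which `parent_idx == -1` marks the
# root compartment (the soma) and every other entry gives the list index of its
# parent. A larger tree is declared the same way; for example, a soma with two
# dendritic children, reusing the parameter dictionaries defined above
# (`cm_demo` is a hypothetical name, not used elsewhere in this test):
cm_demo = nest.Create("cm_default_nestml")
cm_demo.compartments = [
    {"parent_idx": -1, "params": soma_params},          # index 0: soma (root)
    {"parent_idx": 0, "params": dend_params_passive},   # index 1: child of the soma
    {"parent_idx": 0, "params": dend_params_active}     # index 2: second child of the soma
]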
+ + # add somatic and dendritic receptor to passive dendrite model + cm_pas.receptors = [ + {"comp_idx": 0, "receptor_type": "AMPA_NMDA"}, + {"comp_idx": 1, "receptor_type": "AMPA_NMDA"} + ] + syn_idx_soma_pas = 0 + syn_idx_dend_pas = 1 + + # add somatic and dendritic receptor to active dendrite model + cm_act.receptors = [ + {"comp_idx": 0, "receptor_type": "AMPA_NMDA"}, + {"comp_idx": 1, "receptor_type": "AMPA_NMDA"} + ] + syn_idx_soma_act = 0 + syn_idx_dend_act = 1 + + # create a two spike generators + sg_soma = nest.Create('spike_generator', 1, { + 'spike_times': [10., 13., 16.]}) + sg_dend = nest.Create('spike_generator', 1, { + 'spike_times': [70., 73., 76.]}) + + # connect spike generators to passive dendrite model (weight in nS) + nest.Connect( + sg_soma, + cm_pas, + syn_spec={ + 'synapse_model': 'static_synapse', + 'weight': 5., + 'delay': .5, + 'receptor_type': syn_idx_soma_pas}) + nest.Connect( + sg_dend, + cm_pas, + syn_spec={ + 'synapse_model': 'static_synapse', + 'weight': 2., + 'delay': .5, + 'receptor_type': syn_idx_dend_pas}) + # connect spike generators to active dendrite model (weight in nS) + nest.Connect( + sg_soma, + cm_act, + syn_spec={ + 'synapse_model': 'static_synapse', + 'weight': 5., + 'delay': .5, + 'receptor_type': syn_idx_soma_act}) + nest.Connect( + sg_dend, + cm_act, + syn_spec={ + 'synapse_model': 'static_synapse', + 'weight': 2., + 'delay': .5, + 'receptor_type': syn_idx_dend_act}) + + # create multimeters to record state variables + rec_list = self.get_rec_list() + mm_pas = nest.Create('multimeter', 1, {'record_from': rec_list, 'interval': dt}) + mm_act = nest.Create('multimeter', 1, {'record_from': rec_list, 'interval': dt}) + # connect the multimeters to the respective neurons + nest.Connect(mm_pas, cm_pas) + nest.Connect(mm_act, cm_act) + + # simulate the models + nest.Simulate(160.) 
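# Another sketch, not part of this test: the 'receptor_type' entry of syn_spec
# is the 0-based index into the neuron's `receptors` list, which is why
# syn_idx_soma_* is 0 and syn_idx_dend_* is 1 above. Targeting an additional,
# GABAergic receptor (a GABA receptor is defined in cm_default.nestml) would
# look roughly as follows; `sg_inh` and `cm_gaba_demo` are hypothetical names,
# and the spike times, weight and delay are illustrative values only:
sg_inh = nest.Create('spike_generator', 1, {'spike_times': [40., 45.]})
cm_gaba_demo = nest.Create('cm_default_nestml')
cm_gaba_demo.compartments = [
    {"parent_idx": -1, "params": soma_params},
    {"parent_idx": 0, "params": dend_params_passive}
]
cm_gaba_demo.receptors = [
    {"comp_idx": 0, "receptor_type": "AMPA_NMDA"},   # index 0
    {"comp_idx": 1, "receptor_type": "AMPA_NMDA"},   # index 1
    {"comp_idx": 1, "receptor_type": "GABA"}         # index 2
]
nest.Connect(sg_inh, cm_gaba_demo,
             syn_spec={'synapse_model': 'static_synapse', 'weight': 1.,
                       'delay': .5, 'receptor_type': 2})  # index 2 -> GABA receptor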
+ res_pas = nest.GetStatus(mm_pas, 'events')[0] + res_act = nest.GetStatus(mm_act, 'events')[0] + + return res_act, res_pas + + @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") + def test_compartmental_model(self): + self.nestml_flag = False + recordables_nest = self.get_rec_list() + res_act_nest, res_pas_nest = self.run_model() + + self.nestml_flag = True + recordables_nestml = self.get_rec_list() + res_act_nestml, res_pas_nestml = self.run_model() + + if TEST_PLOTS: + w_legends = False + + plt.figure('voltage', figsize=(6, 6)) + # NEST + # plot voltage for somatic compartment + ax_soma = plt.subplot(221) + ax_soma.set_title('NEST') + ax_soma.plot( + res_pas_nest['times'], + res_pas_nest['v_comp0'], + c='b', + label='passive dend') + ax_soma.plot(res_act_nest['times'], res_act_nest['v_comp0'], + c='b', ls='--', lw=2., label='active dend') + ax_soma.set_xlabel(r'$t$ (ms)') + ax_soma.set_ylabel(r'$v_{soma}$ (mV)') + ax_soma.set_ylim((-90., 40.)) + if w_legends: + ax_soma.legend(loc=0) + # plot voltage for dendritic compartment + ax_dend = plt.subplot(222) + ax_dend.set_title('NEST') + ax_dend.plot( + res_pas_nest['times'], + res_pas_nest['v_comp1'], + c='r', + label='passive dend') + ax_dend.plot(res_act_nest['times'], res_act_nest['v_comp1'], + c='r', ls='--', lw=2., label='active dend') + ax_dend.set_xlabel(r'$t$ (ms)') + ax_dend.set_ylabel(r'$v_{dend}$ (mV)') + ax_dend.set_ylim((-90., 40.)) + if w_legends: + ax_dend.legend(loc=0) + + # NESTML + # plot voltage for somatic compartment + ax_soma = plt.subplot(223) + ax_soma.set_title('NESTML') + ax_soma.plot( + res_pas_nestml['times'], + res_pas_nestml['v_comp0'], + c='b', + label='passive dend') + ax_soma.plot(res_act_nestml['times'], res_act_nestml['v_comp0'], + c='b', ls='--', lw=2., label='active dend') + ax_soma.set_xlabel(r'$t$ (ms)') + ax_soma.set_ylabel(r'$v_{soma}$ (mV)') + ax_soma.set_ylim((-90., 40.)) + if w_legends: + ax_soma.legend(loc=0) + # plot voltage for dendritic compartment + ax_dend = plt.subplot(224) + ax_dend.set_title('NESTML') + ax_dend.plot( + res_pas_nestml['times'], + res_pas_nestml['v_comp1'], + c='r', + label='passive dend') + ax_dend.plot(res_act_nestml['times'], res_act_nestml['v_comp1'], + c='r', ls='--', lw=2., label='active dend') + ax_dend.set_xlabel(r'$t$ (ms)') + ax_dend.set_ylabel(r'$v_{dend}$ (mV)') + ax_dend.set_ylim((-90., 40.)) + if w_legends: + ax_dend.legend(loc=0) + plt.savefig("compartmental_model_test - voltage.png") + + plt.figure('channel state variables', figsize=(6, 6)) + # NEST + # plot traces for somatic compartment + ax_soma = plt.subplot(221) + ax_soma.set_title('NEST') + ax_soma.plot( + res_pas_nest['times'], + res_pas_nest['m_Na_0'], + c='b', + label='m_Na passive dend') + ax_soma.plot( + res_pas_nest['times'], + res_pas_nest['h_Na_0'], + c='r', + label='h_Na passive dend') + ax_soma.plot( + res_pas_nest['times'], + res_pas_nest['n_K_0'], + c='g', + label='n_K passive dend') + ax_soma.plot(res_act_nest['times'], res_act_nest['m_Na_0'], + c='b', ls='--', lw=2., label='m_Na active dend') + ax_soma.plot(res_act_nest['times'], res_act_nest['h_Na_0'], + c='r', ls='--', lw=2., label='h_Na active dend') + ax_soma.plot(res_act_nest['times'], res_act_nest['n_K_0'], + c='g', ls='--', lw=2., label='n_K active dend') + ax_soma.set_xlabel(r'$t$ (ms)') + ax_soma.set_ylabel(r'svar') + ax_soma.set_ylim((0., 1.)) + if w_legends: + ax_soma.legend(loc=0) + # plot voltage for dendritic compartment + ax_dend = plt.subplot(222) + 
ax_dend.set_title('NEST') + ax_dend.plot( + res_pas_nest['times'], + res_pas_nest['m_Na_1'], + c='b', + label='m_Na passive dend') + ax_dend.plot( + res_pas_nest['times'], + res_pas_nest['h_Na_1'], + c='r', + label='h_Na passive dend') + ax_dend.plot( + res_pas_nest['times'], + res_pas_nest['n_K_1'], + c='g', + label='n_K passive dend') + ax_dend.plot(res_act_nest['times'], res_act_nest['m_Na_1'], + c='b', ls='--', lw=2., label='m_Na active dend') + ax_dend.plot(res_act_nest['times'], res_act_nest['h_Na_1'], + c='r', ls='--', lw=2., label='h_Na active dend') + ax_dend.plot(res_act_nest['times'], res_act_nest['n_K_1'], + c='g', ls='--', lw=2., label='n_K active dend') + ax_dend.set_xlabel(r'$t$ (ms)') + ax_dend.set_ylabel(r'svar') + ax_dend.set_ylim((0., 1.)) + if w_legends: + ax_dend.legend(loc=0) + + # NESTML + # plot traces for somatic compartment + ax_soma = plt.subplot(223) + ax_soma.set_title('NESTML') + ax_soma.plot( + res_pas_nestml['times'], + res_pas_nestml['m_Na0'], + c='b', + label='m_Na passive dend') + ax_soma.plot( + res_pas_nestml['times'], + res_pas_nestml['h_Na0'], + c='r', + label='h_Na passive dend') + ax_soma.plot( + res_pas_nestml['times'], + res_pas_nestml['n_K0'], + c='g', + label='n_K passive dend') + ax_soma.plot(res_act_nestml['times'], res_act_nestml['m_Na0'], + c='b', ls='--', lw=2., label='m_Na active dend') + ax_soma.plot(res_act_nestml['times'], res_act_nestml['h_Na0'], + c='r', ls='--', lw=2., label='h_Na active dend') + ax_soma.plot(res_act_nestml['times'], res_act_nestml['n_K0'], + c='g', ls='--', lw=2., label='n_K active dend') + ax_soma.set_xlabel(r'$t$ (ms)') + ax_soma.set_ylabel(r'svar') + ax_soma.set_ylim((0., 1.)) + if w_legends: + ax_soma.legend(loc=0) + # plot voltage for dendritic compartment + ax_dend = plt.subplot(224) + ax_dend.set_title('NESTML') + ax_dend.plot( + res_pas_nestml['times'], + res_pas_nestml['m_Na1'], + c='b', + label='m_Na passive dend') + ax_dend.plot( + res_pas_nestml['times'], + res_pas_nestml['h_Na1'], + c='r', + label='h_Na passive dend') + ax_dend.plot( + res_pas_nestml['times'], + res_pas_nestml['n_K1'], + c='g', + label='n_K passive dend') + ax_dend.plot(res_act_nestml['times'], res_act_nestml['m_Na1'], + c='b', ls='--', lw=2., label='m_Na active dend') + ax_dend.plot(res_act_nestml['times'], res_act_nestml['h_Na1'], + c='r', ls='--', lw=2., label='h_Na active dend') + ax_dend.plot(res_act_nestml['times'], res_act_nestml['n_K1'], + c='g', ls='--', lw=2., label='n_K active dend') + ax_dend.set_xlabel(r'$t$ (ms)') + ax_dend.set_ylabel(r'svar') + ax_dend.set_ylim((0., 1.)) + if w_legends: + ax_dend.legend(loc=0) + plt.savefig("compartmental_model_test - channel state variables.png") + + plt.figure('dendritic synapse conductances', figsize=(3, 6)) + # NEST + # plot traces for dendritic compartment + ax_dend = plt.subplot(211) + ax_dend.set_title('NEST') + ax_dend.plot( + res_pas_nest['times'], + res_pas_nest['g_r_AN_AMPA_1'] + res_pas_nest['g_d_AN_AMPA_1'], + c='b', + label='AMPA passive dend') + ax_dend.plot( + res_pas_nest['times'], + res_pas_nest['g_r_AN_NMDA_1'] + res_pas_nest['g_d_AN_NMDA_1'], + c='r', + label='NMDA passive dend') + ax_dend.plot( + res_act_nest['times'], + res_act_nest['g_r_AN_AMPA_1'] + res_act_nest['g_d_AN_AMPA_1'], + c='b', + ls='--', + lw=2., + label='AMPA active dend') + ax_dend.plot( + res_act_nest['times'], + res_act_nest['g_r_AN_NMDA_1'] + res_act_nest['g_d_AN_NMDA_1'], + c='r', + ls='--', + lw=2., + label='NMDA active dend') + ax_dend.set_xlabel(r'$t$ (ms)') + ax_dend.set_ylabel(r'$g_{syn1}$ 
(uS)') + if w_legends: + ax_dend.legend(loc=0) + # plot traces for dendritic compartment + # NESTML + ax_dend = plt.subplot(212) + ax_dend.set_title('NESTML') + ax_dend.plot( + res_pas_nestml['times'], + res_pas_nestml['g_AN_AMPA1'], + c='b', + label='AMPA passive dend') + ax_dend.plot( + res_pas_nestml['times'], + res_pas_nestml['g_AN_NMDA1'], + c='r', + label='NMDA passive dend') + ax_dend.plot(res_act_nestml['times'], res_act_nestml['g_AN_AMPA1'], + c='b', ls='--', lw=2., label='AMPA active dend') + ax_dend.plot(res_act_nestml['times'], res_act_nestml['g_AN_NMDA1'], + c='r', ls='--', lw=2., label='NMDA active dend') + ax_dend.set_xlabel(r'$t$ (ms)') + ax_dend.set_ylabel(r'$g_{syn1}$ (uS)') + if w_legends: + ax_dend.legend(loc=0) + + plt.tight_layout() + plt.savefig("compartmental_model_test - dendritic synapse conductances.png") + + # check if voltages, ion channels state variables are equal + for var_nest, var_nestml in zip( + recordables_nest[:8], recordables_nestml[:8]): + self.assertTrue(np.allclose( + res_act_nest[var_nest], res_act_nestml[var_nestml], atol=5e-1)) + + # check if synaptic conductances are equal + self.assertTrue( + np.allclose( + res_act_nest['g_r_AN_AMPA_1'] + res_act_nest['g_d_AN_AMPA_1'], + res_act_nestml['g_AN_AMPA1'], + 5e-3)) + self.assertTrue( + np.allclose( + res_act_nest['g_r_AN_NMDA_1'] + res_act_nest['g_d_AN_NMDA_1'], + res_act_nestml['g_AN_NMDA1'], + 5e-3)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/nest_compartmental_tests/concmech_model_test.py b/tests/nest_compartmental_tests/concmech_model_test.py new file mode 100644 index 000000000..76f3436b1 --- /dev/null +++ b/tests/nest_compartmental_tests/concmech_model_test.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# +# concmech_model_test.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
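# Regarding the synaptic-conductance checks in test_compartmental_model above:
# the built-in NEST model records two conductance components per receptor
# (g_r_AN_*, g_d_AN_*), which the test sums before comparing them with the
# single conductance recorded from the NESTML-generated model (g_AN_*). Note
# also that the third positional argument of numpy.allclose is rtol, so those
# two checks apply a relative tolerance of 5e-3; if an absolute tolerance was
# intended (matching the atol=5e-1 used for the voltage and gating-variable
# comparison), it would have to be passed by keyword, roughly as in this
# sketch (same variable names as in the test above):
assert np.allclose(
    res_act_nest['g_r_AN_AMPA_1'] + res_act_nest['g_d_AN_AMPA_1'],
    res_act_nestml['g_AN_AMPA1'],
    atol=5e-3)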
+ +import os +import pytest + +import nest + +from pynestml.codegeneration.nest_tools import NESTTools +from pynestml.frontend.pynestml_frontend import generate_nest_compartmental_target + +# set to `True` to plot simulation traces +TEST_PLOTS = True +try: + import matplotlib + import matplotlib.pyplot as plt +except BaseException as e: + # always set TEST_PLOTS to False if matplotlib can not be imported + TEST_PLOTS = False + + +class TestCompartmentalConcmech: + @pytest.fixture(scope="module", autouse=True) + def setup(self): + nest.ResetKernel() + nest.SetKernelStatus(dict(resolution=.1)) + + generate_nest_compartmental_target(input_path=os.path.join(os.path.realpath(os.path.dirname(__file__)), "resources", "concmech.nestml"), + suffix="_nestml", + logging_level="DEBUG", + module_name="concmech_mockup_module") + nest.Install("concmech_mockup_module") + + def test_concmech(self): + cm = nest.Create('multichannel_test_model_nestml') + + soma_params = {'C_m': 10.0, 'g_c': 0.0, 'g_L': 1.5, 'e_L': -70.0, 'gbar_Ca_HVA': 1.0, 'gbar_Ca_LVAst': 0.0} + dend_params = {'C_m': 0.1, 'g_c': 0.1, 'g_L': 0.1, 'e_L': -70.0} + + # nest.AddCompartment(cm, 0, -1, soma_params) + cm.compartments = [ + {"parent_idx": -1, "params": soma_params} + # {"parent_idx": 0, "params": dend_params}, + # {"parent_idx": 0, "params": dend_params} + ] + # nest.AddCompartment(cm, 1, 0, dend_params) + # nest.AddCompartment(cm, 2, 0, dend_params) + + # cm.V_th = -50. + + cm.receptors = [ + {"comp_idx": 0, "receptor_type": "AMPA"} + # {"comp_idx": 1, "receptor_type": "AMPA"}, + # {"comp_idx": 2, "receptor_type": "AMPA"} + ] + + # syn_idx_GABA = 0 + # syn_idx_AMPA = 1 + # syn_idx_NMDA = 2 + + # sg1 = nest.Create('spike_generator', 1, {'spike_times': [50., 100., 125., 137., 143., 146., 600.]}) + sg1 = nest.Create('spike_generator', 1, {'spike_times': [100., 1000., 1100., 1200., 1300., 1400., 1500., 1600., 1700., 1800., 1900., 2000., 5000.]}) + # sg1 = nest.Create('spike_generator', 1, {'spike_times': [(item*6000) for item in range(1, 20)]}) + # sg2 = nest.Create('spike_generator', 1, {'spike_times': [115., 155., 160., 162., 170., 254., 260., 272., 278.]}) + # sg3 = nest.Create('spike_generator', 1, {'spike_times': [250., 255., 260., 262., 270.]}) + + nest.Connect(sg1, cm, syn_spec={'synapse_model': 'static_synapse', 'weight': 4.0, 'delay': 0.5, 'receptor_type': 0}) + # nest.Connect(sg2, cm, syn_spec={'synapse_model': 'static_synapse', 'weight': .2, 'delay': 0.5, 'receptor_type': 1}) + # nest.Connect(sg3, cm, syn_spec={'synapse_model': 'static_synapse', 'weight': .3, 'delay': 0.5, 'receptor_type': 2}) + + mm = nest.Create('multimeter', 1, {'record_from': ['v_comp0', 'c_Ca0', 'i_tot_Ca_LVAst0', 'i_tot_Ca_HVA0'], 'interval': .1}) + + nest.Connect(mm, cm) + + nest.Simulate(6000.) 
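# Beyond the figure that test_concmech saves below, a simple numerical check on
# the recorded calcium concentration could be added; this is only an
# illustrative sketch (numpy is not imported by the test above, and the
# conditions are placeholders rather than validated reference values):
import numpy as np

events = nest.GetStatus(mm, 'events')[0]
assert np.all(np.isfinite(events['c_Ca0']))               # the trace stays finite
assert np.max(events['c_Ca0']) > np.min(events['c_Ca0'])  # and is not constant over the run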
+ + res = nest.GetStatus(mm, 'events')[0] + + fig, axs = plt.subplots(5) + + axs[0].plot(res['times'], res['v_comp0'], c='b', label='V_m_0') + axs[1].plot(res['times'], res['i_tot_Ca_LVAst0'], c='r', label='i_Ca_LVAst_0') + axs[1].plot(res['times'], res['i_tot_Ca_HVA0'], c='g', label='i_Ca_HVA_0') + axs[2].plot(res['times'], res['c_Ca0'], c='r', label='c_Ca_0') + + axs[0].set_title('V_m_0') + axs[1].set_title('i_Ca_HVA/LVA_0') + axs[2].set_title('c_Ca_0') + # plt.plot(res['times'], res['v_comp2'], c='g', label='V_m_2') + + axs[0].legend() + axs[1].legend() + axs[2].legend() + + plt.savefig("concmech_test.png") diff --git a/tests/nest_compartmental_tests/resources/cm_default.nestml b/tests/nest_compartmental_tests/resources/cm_default.nestml new file mode 100644 index 000000000..f57d28903 --- /dev/null +++ b/tests/nest_compartmental_tests/resources/cm_default.nestml @@ -0,0 +1,152 @@ +""" +Example compartmental model for NESTML + +Description ++++++++++++ +Corresponds to standard compartmental model implemented in NEST. +""" +neuron cm_default: + + state: + + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real = 0 + + ### ion channels ### + # initial values state variables sodium channel + m_Na real = 0.0 + h_Na real = 0.0 + + # initial values state variables potassium channel + n_K real = 0.0 + + + parameters: + ### ion channels ### + # default parameters sodium channel + e_Na real = 50.0 + gbar_Na real = 0.0 + + # default parameters potassium channel + e_K real = -85.0 + gbar_K real = 0.0 + + ### synapses ### + e_AMPA real = 0 mV # Excitatory reversal Potential + tau_r_AMPA real = 0.2 ms # Synaptic Time Constant Excitatory Synapse + tau_d_AMPA real = 3.0 ms # Synaptic Time Constant Excitatory Synapse + + e_GABA real = -80. mV # Inhibitory reversal Potential + tau_r_GABA real = 0.2 ms # Synaptic Time Constant Inhibitory Synapse + tau_d_GABA real = 10.0 ms # Synaptic Time Constant Inhibitory Synapse + + e_NMDA real = 0 mV # NMDA reversal Potential + tau_r_NMDA real = 0.2 ms # Synaptic Time Constant NMDA Synapse + tau_d_NMDA real = 43.0 ms # Synaptic Time Constant NMDA Synapse + + e_AN_AMPA real = 0 mV # Excitatory reversal Potential + tau_r_AN_AMPA real = 0.2 ms # Synaptic Time Constant Excitatory Synapse + tau_d_AN_AMPA real = 3.0 ms # Synaptic Time Constant Excitatory Synapse + e_AN_NMDA real = 0 mV # NMDA reversal Potential + tau_r_AN_NMDA real = 0.2 ms # Synaptic Time Constant NMDA Synapse + tau_d_AN_NMDA real = 43.0 ms # Synaptic Time Constant NMDA Synapse + NMDA_ratio real = 2.0 # NMDA_ratio + + equations: + # Here, we define the currents that are present in the model. Currents may, + # or may not depend on [v_comp]. Each variable in the equation for the currents + # must correspond either to a parameter (e.g. [gbar_Na], [e_Na], e_[NMDA], etc...) + # or to a state variable (e.g [m_Na], [n_K], [g_r_AMPA], etc...). + # + # When it is a parameter, it must be configurable from Python, by adding it as + # a key: value pair to the dictionary argument of `nest.AddCompartment` for an + # ion channel or of `nest.AddReceptor` for a synapse. + # + # State variables must reoccur in the initial values block and have an associated + # equation in the equations block. + # + # Internally, the model must compute the pair of values (g_val, i_val) for the + # integration algorithm. 
To do so, we need both the equation for current, and + # its voltage derivative + # + # i_X + # d(i_X)/dv + # + # Which we should be able to obtain from sympy trough symbolic differentiation. + # Then, + # + # g_val = d(i_X)/d(v_comp) / 2. + # i_val = i_X - d(i_X)/d(v_comp) / 2. + + ### ion channels ### + h_Na' = (h_inf_Na(v_comp) - h_Na) / (tau_h_Na(v_comp) * 1 s) + m_Na' = (m_inf_Na(v_comp) - m_Na) / (tau_m_Na(v_comp) * 1 s) + n_K' = (n_inf_K(v_comp) - n_K) / (tau_n_K(v_comp) * 1 s) + + ### synapses, must contain convolution(s) with spike input ### + + inline Na real = gbar_Na * m_Na**3 * h_Na * (e_Na - v_comp) @mechanism::channel + inline K real = gbar_K * n_K * (e_K - v_comp) @mechanism::channel + + ### synapses, characterized by convolution(s) with spike input ### + kernel g_AMPA = g_norm_AMPA * ( - exp(-t / tau_r_AMPA) + exp(-t / tau_d_AMPA) ) + inline AMPA real = convolve(g_AMPA, spikes_AMPA) * (e_AMPA - v_comp) @mechanism::receptor + + kernel g_GABA = g_norm_GABA * ( - exp(-t / tau_r_GABA) + exp(-t / tau_d_GABA) ) + inline GABA real = convolve(g_GABA, spikes_GABA) * (e_GABA - v_comp ) @mechanism::receptor + + kernel g_NMDA = g_norm_NMDA * ( - exp(-t / tau_r_NMDA) + exp(-t / tau_d_NMDA) ) + inline NMDA real = convolve(g_NMDA, spikes_NMDA) * (e_NMDA - v_comp ) / (1. + 0.3 * exp( -.1 * v_comp )) @mechanism::receptor + + kernel g_AN_AMPA = g_norm_AN_AMPA * ( - exp(-t / tau_r_AN_AMPA) + exp(-t / tau_d_AN_AMPA) ) + kernel g_AN_NMDA = g_norm_AN_NMDA * ( - exp(-t / tau_r_AN_NMDA) + exp(-t / tau_d_AN_NMDA) ) + inline AMPA_NMDA real = convolve(g_AN_AMPA, spikes_AN) * (e_AN_AMPA - v_comp) + NMDA_ratio * \ + convolve(g_AN_NMDA, spikes_AN) * (e_AN_NMDA - v_comp) / (1. + 0.3 * exp( -.1 * v_comp )) @mechanism::receptor + + # functions K + function n_inf_K (v_comp real) real: + return 0.02*(1.0 - exp(0.111111111111111*(25.0 - v_comp)))**(-1)*(-0.002*(-25.0 + v_comp)*(1.0 - exp(0.111111111111111*(-25.0 + v_comp)))**(-1) + 0.02*(-25.0 + v_comp)*(1.0 - exp(0.111111111111111*(25.0 - v_comp)))**(-1))**(-1)*(-25.0 + v_comp) + + function tau_n_K (v_comp real) real: + return 0.311526479750779*(-0.002*(-25.0 + v_comp)*(1.0 - exp(0.111111111111111*(-25.0 + v_comp)))**(-1) + 0.02*(-25.0 + v_comp)*(1.0 - exp(0.111111111111111*(25.0 - v_comp)))**(-1))**(-1) + + + # functions Na + function m_inf_Na (v_comp real) real: + return (1.0 - 0.020438532058318*exp(-0.111111111111111*v_comp))**(-1)*((1.0 - 0.020438532058318*exp(-0.111111111111111*v_comp))**(-1)*(6.372366 + 0.182*v_comp) + (1.0 - 48.9271928701465*exp(0.111111111111111*v_comp))**(-1)*(-4.341612 - 0.124*v_comp))**(-1)*(6.372366 + 0.182*v_comp) + + function tau_m_Na (v_comp real) real: + return 0.311526479750779*((1.0 - 0.020438532058318*exp(-0.111111111111111*v_comp))**(-1)*(6.372366 + 0.182*v_comp) + (1.0 - 48.9271928701465*exp(0.111111111111111*v_comp))**(-1)*(-4.341612 - 0.124*v_comp))**(-1) + + function h_inf_Na (v_comp real) real: + return 1.0*(1.0 + 35734.4671267926*exp(0.161290322580645*v_comp))**(-1) + + function tau_h_Na (v_comp real) real: + return 0.311526479750779*((1.0 - 4.52820432639598e-5*exp(-0.2*v_comp))**(-1)*(1.200312 + 0.024*v_comp) + (1.0 - 3277527.87650153*exp(0.2*v_comp))**(-1)*(-0.6826183 - 0.0091*v_comp))**(-1) + + + internals: + tp_AMPA real = (tau_r_AMPA * tau_d_AMPA) / (tau_d_AMPA - tau_r_AMPA) * ln( tau_d_AMPA / tau_r_AMPA ) + g_norm_AMPA real = 1. 
/ ( -exp( -tp_AMPA / tau_r_AMPA ) + exp( -tp_AMPA / tau_d_AMPA ) ) + + tp_GABA real = (tau_r_GABA * tau_d_GABA) / (tau_d_GABA - tau_r_GABA) * ln( tau_d_GABA / tau_r_GABA ) + g_norm_GABA real = 1. / ( -exp( -tp_GABA / tau_r_GABA ) + exp( -tp_GABA / tau_d_GABA ) ) + + tp_NMDA real = (tau_r_NMDA * tau_d_NMDA) / (tau_d_NMDA - tau_r_NMDA) * ln( tau_d_NMDA / tau_r_NMDA ) + g_norm_NMDA real = 1. / ( -exp( -tp_NMDA / tau_r_NMDA ) + exp( -tp_NMDA / tau_d_NMDA ) ) + + tp_AN_AMPA real = (tau_r_AN_AMPA * tau_d_AN_AMPA) / (tau_d_AN_AMPA - tau_r_AN_AMPA) * ln( tau_d_AN_AMPA / tau_r_AN_AMPA ) + g_norm_AN_AMPA real = 1. / ( -exp( -tp_AN_AMPA / tau_r_AN_AMPA ) + exp( -tp_AN_AMPA / tau_d_AN_AMPA ) ) + + tp_AN_NMDA real = (tau_r_AN_NMDA * tau_d_AN_NMDA) / (tau_d_AN_NMDA - tau_r_AN_NMDA) * ln( tau_d_AN_NMDA / tau_r_AN_NMDA ) + g_norm_AN_NMDA real = 1. / ( -exp( -tp_AN_NMDA / tau_r_AN_NMDA ) + exp( -tp_AN_NMDA / tau_d_AN_NMDA ) ) + + input: + spikes_AMPA <- spike + spikes_GABA <- spike + spikes_NMDA <- spike + spikes_AN <- spike + + output: + spike diff --git a/tests/nest_compartmental_tests/resources/concmech.nestml b/tests/nest_compartmental_tests/resources/concmech.nestml new file mode 100644 index 000000000..5e365d7df --- /dev/null +++ b/tests/nest_compartmental_tests/resources/concmech.nestml @@ -0,0 +1,196 @@ + +neuron multichannel_test_model: + parameters: + e_AMPA real = 0 mV + tau_r_AMPA real = 0.2 ms + tau_d_AMPA real = 3.0 ms + + # parameters Ca_HVA + gbar_Ca_HVA real = 0.00 + e_Ca_HVA real = 50.00 + + # parameters Ca_LVAst + gbar_Ca_LVAst real = 0.00 + e_Ca_LVAst real = 50.00 + + # parameters NaTa_t + gbar_NaTa_t real = 0.00 + e_NaTa_t real = 50.00 + + # parameters SK_E2 + gbar_SK_E2 real = 0.00 + e_SK_E2 real = -85.00 + + # parameters SKv3_1 + gbar_SKv3_1 real = 0.00 + e_SKv3_1 real = -85.00 + + # parameters Ca conentration mech + gamma_Ca real = 0.04627 + tau_Ca real = 605.03 + inf_Ca real = 0.0001 + + state: + v_comp real = -7500.00000000 + + # state variables Ca_HVA + h_Ca_HVA real = 0.69823671 + m_Ca_HVA real = 0.00000918 + + # state variables Ca_LVAst + h_Ca_LVAst real = 0.08756384 + m_Ca_LVAst real = 0.00291975 + + # state variables NaTa_t + h_NaTa_t real = 0.81757448 + m_NaTa_t real = 0.00307019 + + # state variables SK_E2 + z_SK_E2 real = 0.00090982 + + # state variables SKv3_1 + z_SKv3_1 real = 0.00006379 + + # state variable Ca concentration + c_Ca real = 0.0001 + + equations: + kernel g_AMPA = g_norm_AMPA * ( - exp(-t / tau_r_AMPA) + exp(-t / tau_d_AMPA) ) + inline AMPA real = convolve(g_AMPA, spikes_AMPA) * (e_AMPA - v_comp) @mechanism::receptor + + # equations Ca_HVA + inline Ca_HVA real = gbar_Ca_HVA * (h_Ca_HVA*m_Ca_HVA**2) * (e_Ca_HVA - v_comp) @mechanism::channel + m_Ca_HVA' = ( m_inf_Ca_HVA(v_comp) - m_Ca_HVA ) / (tau_m_Ca_HVA(v_comp)*1s) + h_Ca_HVA' = ( h_inf_Ca_HVA(v_comp) - h_Ca_HVA ) / (tau_h_Ca_HVA(v_comp)*1s) + + # equations Ca_LVAst + inline Ca_LVAst real = gbar_Ca_LVAst * (h_Ca_LVAst*m_Ca_LVAst**2) * (e_Ca_LVAst - v_comp) @mechanism::channel + m_Ca_LVAst' = ( m_inf_Ca_LVAst(v_comp) - m_Ca_LVAst ) / (tau_m_Ca_LVAst(v_comp)*1s) + h_Ca_LVAst' = ( h_inf_Ca_LVAst(v_comp) - h_Ca_LVAst ) / (tau_h_Ca_LVAst(v_comp)*1s) + + # equations NaTa_t + #inline NaTa_t real = gbar_NaTa_t * (h_NaTa_t*m_NaTa_t**3) * (e_NaTa_t - v_comp) @mechanism::channel + #m_NaTa_t' = ( m_inf_NaTa_t(v_comp) - m_NaTa_t ) / (tau_m_NaTa_t(v_comp)*1s) + #h_NaTa_t' = ( h_inf_NaTa_t(v_comp) - h_NaTa_t ) / (tau_h_NaTa_t(v_comp)*1s) + + # equations SKv3_1 + #inline SKv3_1 real = gbar_SKv3_1 * (z_SKv3_1) * (e_SKv3_1 
- v_comp) @mechanism::channel + #z_SKv3_1' = ( z_inf_SKv3_1(v_comp) - z_SKv3_1 ) / (tau_z_SKv3_1(v_comp)*1s) + + # equations SK_E2 + #inline SK_E2 real = gbar_SK_E2 * (z_SK_E2) * (e_SK_E2 - v_comp) @mechanism::channel + #z_SK_E2' = ( z_inf_SK_E2(c_Ca) - z_SK_E2) / 1.0s + + # equations Ca concentration mechanism + c_Ca' = (inf_Ca - c_Ca) / (tau_Ca*1s) + (gamma_Ca * (Ca_HVA + Ca_LVAst)) / 1s @mechanism::concentration + + + + # functions Ca_HVA + function h_inf_Ca_HVA (v_comp real) real: + val real + val = 0.000457*(0.000457 + (0.000457 + 0.0065*exp(13/50 + (1/50)*v_comp))*exp(15/28 + (1/28)*v_comp))**(-1)*(1 + exp(-15/28 - 1/28*v_comp))*exp(15/28 + (1/28)*v_comp) + return val + + + function tau_h_Ca_HVA (v_comp real) real: + val real + val = 1.0*(0.0065*(1 + exp(-15/28 - 1/28*v_comp))**(-1) + 0.000457*exp(-13/50 - 1/50*v_comp))**(-1) + return val + + function m_inf_Ca_HVA (v_comp real) real: + val real + val = (-9.36151644263754e-6 + 0.055*(27 + v_comp)*exp(0.321981424148607*v_comp) + 0.0114057221149848*exp(0.263157894736842*v_comp))**(-1)*(1.485 + 0.055*v_comp)*exp(0.321981424148607*v_comp) + return val + + + function tau_m_Ca_HVA (v_comp real) real: + val real + val = (-9.36151644263754e-6 + 0.055*(27 + v_comp)*exp(0.321981424148607*v_comp) + 0.0114057221149848*exp(0.263157894736842*v_comp))**(-1)*(-0.000820773673798209 + 1.0*exp(0.263157894736842*v_comp))*exp((1/17)*v_comp) + return val + + + # functions Ca_LVAst + function h_inf_Ca_LVAst (v_comp real) real: + val real + val = 1.0*(1 + 1280165.59676428*exp(0.15625*v_comp))**(-1) + return val + + + function tau_h_Ca_LVAst (v_comp real) real: + val real + val = (1 + 1265.03762380433*exp((1/7)*v_comp))**(-1)*(23.7056491911662 + 8568.15374958056*exp((1/7)*v_comp)) + return val + + function m_inf_Ca_LVAst (v_comp real) real: + val real + val = 1.0*(1 + 0.00127263380133981*exp(-1/6*v_comp))**(-1) + return val + + + function tau_m_Ca_LVAst (v_comp real) real: + val real + val = (1 + 1096.63315842846*exp((1/5)*v_comp))**(-1)*(8.46630328255936 + 1856.88578179326*exp((1/5)*v_comp)) + return val + + + # functions NaTa_t + function h_inf_NaTa_t (v_comp real) real: + val real + if v_comp >= -66.000006 and v_comp < -65.999994: + val = -(2.25 + 0.0416666666666667 * v_comp) + else: + val = (-1.67017007902457e-05 + 59874.1417151978 * exp(0.333333333333333 * v_comp)) ** (-1) * (-1.67017007902457e-05 + 1.0 * exp(0.166666666666667 * v_comp)) + + return val + + + function tau_h_NaTa_t (v_comp real) real: + val real + if v_comp >= -66.000006 and v_comp < -65.999994: + val = 1.88140072945764 + else: + val = (-0.00110231225215621 + 3951693.35320306 * exp(0.333333333333333 * v_comp) - 1.67017007902457e-05 * v_comp + 59874.1417151978 * v_comp * exp(0.333333333333333 * v_comp)) ** (-1) * (0.000377071104599416 - 45.1536175069833 * exp(0.166666666666667 * v_comp) + 1351767.04678348 * exp(0.333333333333333 * v_comp)) + + return val + + function m_inf_NaTa_t (v_comp real) real: + val real + if v_comp > -38.000006 and v_comp < -37.999994: + val = (2.938 + 0.029 * v_comp) ** (-1) * (4.55 + 0.091 * v_comp) + else: + val = 0.182 * (0.182 * (-1 + 563.030236835951 * exp(0.166666666666667 * v_comp)) * (38.0 + v_comp) * exp(0.166666666666667 * v_comp) + (4.712 + 0.124 * v_comp) * (-0.00177610354573438 + exp(0.166666666666667 * v_comp))) ** (-1) * (-1 + 563.030236835951 * exp(0.166666666666667 * v_comp)) * (38.0 + v_comp) * exp(0.166666666666667 * v_comp) + return val + + function tau_m_NaTa_t (v_comp real) real: + val real + if v_comp > -38.000006 and v_comp < -37.999994: + 
val = 0.338652131302374 * (2.938 + 0.029 * v_comp) ** (-1) + else: + val = 0.338652131302374 * (0.182 * (-1 + 563.030236835951 * exp(0.166666666666667 * v_comp)) * (38.0 + v_comp) * exp(0.166666666666667 * v_comp) + (4.712 + 0.124 * v_comp) * (-0.00177610354573438 + exp(0.166666666666667 * v_comp))) ** (-1) * (-1 + 563.030236835951 * exp(0.166666666666667 * v_comp)) * (-0.00177610354573438 + exp(0.166666666666667 * v_comp)) + return val + + # functions SKv3_1 + function z_inf_SKv3_1 (v_comp real) real: + val real + val = (6.874610940966 + exp(0.103092783505155*v_comp))**(-1)*exp(0.103092783505155*v_comp) + return val + + function tau_z_SKv3_1 (v_comp real) real: + val real + val = 4.0*(0.348253173014273 + exp(0.0226551880380607*v_comp))**(-1)*exp(0.0226551880380607*v_comp) + return val + + + # functions SK_E2 + function z_inf_SK_E2 (ca real) real: + val real + val = 1. / (1. + (0.00043 / ca)**4.8) + return val + + internals: + tp_AMPA real = (tau_r_AMPA * tau_d_AMPA) / (tau_d_AMPA - tau_r_AMPA) * ln( tau_d_AMPA / tau_r_AMPA ) + g_norm_AMPA real = 1. / ( -exp( -tp_AMPA / tau_r_AMPA ) + exp( -tp_AMPA / tau_d_AMPA ) ) + + input: + spikes_AMPA <- spike diff --git a/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableHasRhs.nestml b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableHasRhs.nestml new file mode 100644 index 000000000..258db7961 --- /dev/null +++ b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableHasRhs.nestml @@ -0,0 +1,57 @@ +""" +CoCoCmVariableHasRhs.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether the all variable declarations of the +compartmental model contain a right hand side expression + +Negative case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_four_invalid: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real + + m_Na real + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + equations: + inline Na real = m_Na**3 + + parameters: + e_Na real + gbar_Na real diff --git a/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableMultiUse.nestml b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableMultiUse.nestml new file mode 100644 index 000000000..85e16ac13 --- /dev/null +++ b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariableMultiUse.nestml @@ -0,0 +1,57 @@ +""" +CoCoCmVariableMultiUse.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether the inline expression that characterizes +a channel uses each variable exactly once + +Negative case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_five_invalid: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real = 0.0 + + m_Na real = 0.0 + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + equations: + inline Na real = m_Na**3 * m_Na**2 + + parameters: + e_Na real = 50.0 + gbar_Na real = 0.0 diff --git a/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariablesDeclared.nestml b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariablesDeclared.nestml new file mode 100644 index 000000000..e7d1b4b51 --- /dev/null +++ b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVariablesDeclared.nestml @@ -0,0 +1,57 @@ +""" +CoCoCmVariablesDeclared.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether compartmental variables used in the inline expression +are also declared in the corresponding state / parameter block + +Negative case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_seven_invalid: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real = 0.0 + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + function h_inf_Na(v_comp real) real: + return 1.0/(exp(0.16129032258064516*v_comp + 10.483870967741936) + 1.0) + + function tau_h_Na(v_comp real) real: + return 0.3115264797507788/((-0.0091000000000000004*v_comp - 0.68261830000000012)/(1.0 - 3277527.8765015295*exp(0.20000000000000001*v_comp)) + (0.024*v_comp + 1.200312)/(1.0 - 4.5282043263959816e-5*exp(-0.20000000000000001*v_comp))) + + equations: + inline Na real = m_Na**3 * h_Na**1 diff --git a/tests/nest_compartmental_tests/resources/invalid/CoCoCmVcompExists.nestml b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVcompExists.nestml new file mode 100644 index 000000000..ce111ad4f --- /dev/null +++ b/tests/nest_compartmental_tests/resources/invalid/CoCoCmVcompExists.nestml @@ -0,0 +1,60 @@ +""" +CoCoCmVcompExists.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether, in case of a compartmental model ("NEST_COMPARTMENTAL"), +there is the required variable called v_comp defined in the state block + +Negative case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_eight_invalid: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + m_Na real = 0.0 + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + function h_inf_Na(v_comp real) real: + return 1.0/(exp(0.16129032258064516*v_comp + 10.483870967741936) + 1.0) + + function tau_h_Na(v_comp real) real: + return 0.3115264797507788/((-0.0091000000000000004*v_comp - 0.68261830000000012)/(1.0 - 3277527.8765015295*exp(0.20000000000000001*v_comp)) + (0.024*v_comp + 1.200312)/(1.0 - 4.5282043263959816e-5*exp(-0.20000000000000001*v_comp))) + + equations: + inline Na real = m_Na**3 * h_Na**1 + + parameters: + foo real = 1. diff --git a/tests/nest_compartmental_tests/resources/valid/CoCoCmVariableHasRhs.nestml b/tests/nest_compartmental_tests/resources/valid/CoCoCmVariableHasRhs.nestml new file mode 100644 index 000000000..0084144e1 --- /dev/null +++ b/tests/nest_compartmental_tests/resources/valid/CoCoCmVariableHasRhs.nestml @@ -0,0 +1,57 @@ +""" +CoCoCmVariableHasRhs.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether the all variable declarations of the +compartmental model contain a right hand side expression + +Positive case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_four: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real = 0.0 + + m_Na real = 0.0 + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + equations: + inline Na real = m_Na**3 + + parameters: + e_Na real = 50.0 + gbar_Na real = 0.0 diff --git a/tests/nest_compartmental_tests/resources/valid/CoCoCmVariableMultiUse.nestml b/tests/nest_compartmental_tests/resources/valid/CoCoCmVariableMultiUse.nestml new file mode 100644 index 000000000..d44365da0 --- /dev/null +++ b/tests/nest_compartmental_tests/resources/valid/CoCoCmVariableMultiUse.nestml @@ -0,0 +1,57 @@ +""" +CoCoCmVariableMultiUse.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether the inline expression that characterizes +a channel uses each variable exactly once + +Positive case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_five: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real = 0.0 + + m_Na real = 0.0 + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + equations: + inline Na real = m_Na**3 + + parameters: + e_Na real = 50.0 + gbar_Na real = 0.0 diff --git a/tests/nest_compartmental_tests/resources/valid/CoCoCmVariablesDeclared.nestml b/tests/nest_compartmental_tests/resources/valid/CoCoCmVariablesDeclared.nestml new file mode 100644 index 000000000..2b8382036 --- /dev/null +++ b/tests/nest_compartmental_tests/resources/valid/CoCoCmVariablesDeclared.nestml @@ -0,0 +1,64 @@ +""" +CoCoCmVariablesDeclared.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether compartmental variables used in the inline expression +are also declared in the corresponding state / parameter block + +Positive case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_seven: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real = 0.0 + + m_Na real = 0.0 + h_Na real = 0.0 + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + function h_inf_Na(v_comp real) real: + return 1.0/(exp(0.16129032258064516*v_comp + 10.483870967741936) + 1.0) + + function tau_h_Na(v_comp real) real: + return 0.3115264797507788/((-0.0091000000000000004*v_comp - 0.68261830000000012)/(1.0 - 3277527.8765015295*exp(0.20000000000000001*v_comp)) + (0.024*v_comp + 1.200312)/(1.0 - 4.5282043263959816e-5*exp(-0.20000000000000001*v_comp))) + + equations: + inline Na real = m_Na**3 * h_Na**1 + + parameters: + e_Na real = 50.0 + gbar_Na real = 0.0 diff --git a/tests/nest_compartmental_tests/resources/valid/CoCoCmVcompExists.nestml b/tests/nest_compartmental_tests/resources/valid/CoCoCmVcompExists.nestml new file mode 100644 index 000000000..dd63d6b0c --- /dev/null +++ b/tests/nest_compartmental_tests/resources/valid/CoCoCmVcompExists.nestml @@ -0,0 +1,61 @@ +""" +CoCoCmVcompExists.nestml +########################### + + +Description ++++++++++++ + +This model is used to test whether, in case of a compartmental model ("NEST_COMPARTMENTAL"), +there is the required variable called v_comp defined in the state block + +Positive case. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" + +neuron cm_model_eight_valid: + + state: + # compartmental voltage variable, + # rhs value is irrelevant but the state must exist so that the nestml parser doesn't complain + v_comp real = 0.0 + m_Na real = 0.0 + + #sodium + function m_inf_Na(v_comp real) real: + return (0.182*v_comp + 6.3723659999999995)/((1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))*((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp)))) + + function tau_m_Na(v_comp real) real: + return 0.3115264797507788/((-0.124*v_comp - 4.3416119999999996)/(1.0 - 48.927192870146527*exp(0.1111111111111111*v_comp)) + (0.182*v_comp + 6.3723659999999995)/(1.0 - 0.020438532058318047*exp(-0.1111111111111111*v_comp))) + + function h_inf_Na(v_comp real) real: + return 1.0/(exp(0.16129032258064516*v_comp + 10.483870967741936) + 1.0) + + function tau_h_Na(v_comp real) real: + return 0.3115264797507788/((-0.0091000000000000004*v_comp - 0.68261830000000012)/(1.0 - 3277527.8765015295*exp(0.20000000000000001*v_comp)) + (0.024*v_comp + 1.200312)/(1.0 - 4.5282043263959816e-5*exp(-0.20000000000000001*v_comp))) + + equations: + inline Na real = m_Na**3 + + parameters: + foo real = 1. \ No newline at end of file diff --git a/tests/nest_tests/nest_multisynapse_test.py b/tests/nest_tests/nest_multisynapse_test.py index 51807abca..88500f2b8 100644 --- a/tests/nest_tests/nest_multisynapse_test.py +++ b/tests/nest_tests/nest_multisynapse_test.py @@ -112,7 +112,7 @@ def test_multisynapse(self): ax[-1].set_xlabel("time") - plt.show() + plt.savefig("/tmp/nest_multisynapse_test.png") def test_multisynapse_with_vector_input_ports(self): input_path = os.path.join(os.path.realpath(os.path.join(