From a0866860151507606c8cfe06f6226160285e7d1f Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Wed, 18 Sep 2024 14:55:50 +0200 Subject: [PATCH 1/3] stp synapse name change --- ...stdp_synapse.nestml => stp_synapse.nestml} | 2 +- tests/nest_tests/stdp_synapse_test.py | 318 +----------------- 2 files changed, 16 insertions(+), 304 deletions(-) rename models/synapses/{stdp_synapse.nestml => stp_synapse.nestml} (99%) diff --git a/models/synapses/stdp_synapse.nestml b/models/synapses/stp_synapse.nestml similarity index 99% rename from models/synapses/stdp_synapse.nestml rename to models/synapses/stp_synapse.nestml index f04b4b971..b858d2b4d 100644 --- a/models/synapses/stdp_synapse.nestml +++ b/models/synapses/stp_synapse.nestml @@ -33,7 +33,7 @@ References Stable Hebbian learning from spike timing-dependent plasticity, Journal of Neuroscience, 20:23,8812--8821 """ -model stdp_synapse: +model stp_synapse: state: w real = 1 # Synaptic weight pre_trace real = 0. diff --git a/tests/nest_tests/stdp_synapse_test.py b/tests/nest_tests/stdp_synapse_test.py index e6ec31cb9..309e0e08c 100644 --- a/tests/nest_tests/stdp_synapse_test.py +++ b/tests/nest_tests/stdp_synapse_test.py @@ -45,318 +45,30 @@ class TestNestSTDPSynapse: - neuron_model_name = "iaf_psc_exp_neuron_nestml__with_stdp_synapse_nestml" - ref_neuron_model_name = "iaf_psc_exp_neuron_nestml_non_jit" - - synapse_model_name = "stdp_synapse_nestml__with_iaf_psc_exp_neuron_nestml" - ref_synapse_model_name = "stdp_synapse" + neuron_model_name = "iaf_psc_exp_neuron_nestml__with_stp_synapse_nestml" + synapse_model_name = "stp_synapse_nestml__with_iaf_psc_exp_neuron_nestml" @pytest.fixture(autouse=True, scope="module") def generate_model_code(self): """Generate the model code""" - jit_codegen_opts = {"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_neuron", - "synapse": "stdp_synapse", + codegen_opts = {"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_neuron", + "synapse": "stp_synapse", "post_ports": ["post_spikes"]}], - "delay_variable": {"stdp_synapse": "d"}, - "weight_variable": {"stdp_synapse": "w"}} - if not NESTTools.detect_nest_version().startswith("v2"): - jit_codegen_opts["neuron_parent_class"] = "StructuralPlasticityNode" - jit_codegen_opts["neuron_parent_class_include"] = "structural_plasticity_node.h" - - # generate the "jit" model (co-generated neuron and synapse), that does not rely on ArchivingNode + "delay_variable": {"stp_synapse": "d"}, + "weight_variable": {"stp_synapse": "w"}} files = [os.path.join("models", "neurons", "iaf_psc_exp_neuron.nestml"), - os.path.join("models", "synapses", "stdp_synapse.nestml")] + os.path.join("models", "synapses", "stp_synapse.nestml")] input_path = [os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join( - os.pardir, os.pardir, s))) for s in files] + os.pardir, os.pardir, s))) for s in files] generate_nest_target(input_path=input_path, - target_path="/tmp/nestml-jit", - logging_level="INFO", - module_name="nestml_jit_module", + logging_level="DEBUG", suffix="_nestml", - codegen_opts=jit_codegen_opts) - - if NESTTools.detect_nest_version().startswith("v2"): - non_jit_codegen_opts = {"neuron_parent_class": "Archiving_Node", - "neuron_parent_class_include": "archiving_node.h"} - else: - non_jit_codegen_opts = {"neuron_parent_class": "ArchivingNode", - "neuron_parent_class_include": "archiving_node.h"} - - # generate the "non-jit" model, that relies on ArchivingNode - generate_nest_target(input_path=os.path.realpath(os.path.join(os.path.dirname(__file__), - os.path.join(os.pardir, 
os.pardir, "models", "neurons", "iaf_psc_exp_neuron.nestml"))), - target_path="/tmp/nestml-non-jit", - logging_level="INFO", - module_name="nestml_non_jit_module", - suffix="_nestml_non_jit", - codegen_opts=non_jit_codegen_opts) - - @pytest.mark.parametrize("delay", [1., 1.5]) - @pytest.mark.parametrize("resolution", [.1, .5, 1.]) - @pytest.mark.parametrize("pre_spike_times,post_spike_times", [ - ([1., 11., 21.], - [6., 16., 26.]), - (np.sort(np.unique(1 + np.round(100 * np.sort(np.abs(np.random.randn(100)))))), - np.sort(np.unique(1 + np.round(100 * np.sort(np.abs(np.random.randn(100))))))), - (np.array([2., 4., 7., 8., 12., 13., 19., 23., 24., 28., 29., 30., 33., 34., - 35., 36., 38., 40., 42., 46., 51., 53., 54., 55., 56., 59., 63., 64., - 65., 66., 68., 72., 73., 76., 79., 80., 83., 84., 86., 87., 90., 95., - 99., 100., 103., 104., 105., 111., 112., 126., 131., 133., 134., 139., 147., 150., - 152., 155., 172., 175., 176., 181., 196., 197., 199., 202., 213., 215., 217., 265.]), - np.array([4., 5., 6., 7., 10., 11., 12., 16., 17., 18., 19., 20., 22., 23., - 25., 27., 29., 30., 31., 32., 34., 36., 37., 38., 39., 42., 44., 46., - 48., 49., 50., 54., 56., 57., 59., 60., 61., 62., 67., 74., 76., 79., - 80., 81., 83., 88., 93., 94., 97., 99., 100., 105., 111., 113., 114., 115., - 116., 119., 123., 130., 132., 134., 135., 145., 152., 155., 158., 166., 172., 174., - 188., 194., 202., 245., 249., 289., 454.])), - (np.array([1, 5, 6, 7, 9, 11, 12, 13, 14.5, 16.1]), - np.array([2, 3, 4, 8, 9, 10, 12, 13.2, 15.1, 16.4])) - ]) - def test_nest_stdp_synapse(self, pre_spike_times: Sequence[float], post_spike_times: Sequence[float], resolution: float, delay: float, fname_snip: str = ""): - self.run_synapse_test(neuron_model_name=self.neuron_model_name, - ref_neuron_model_name=self.ref_neuron_model_name, - synapse_model_name=self.synapse_model_name, - ref_synapse_model_name=self.ref_synapse_model_name, - resolution=resolution, # [ms] - delay=delay, # [ms] - pre_spike_times=pre_spike_times, - post_spike_times=post_spike_times, - fname_snip=fname_snip) - - def run_synapse_test(self, neuron_model_name, - ref_neuron_model_name, - synapse_model_name, - ref_synapse_model_name, - resolution=1., # [ms] - delay=1., # [ms] - sim_time=None, # if None, computed from pre and post spike times - pre_spike_times=None, - post_spike_times=None, - fname_snip=""): - - if pre_spike_times is None: - pre_spike_times = [] - - if post_spike_times is None: - post_spike_times = [] - - if sim_time is None: - sim_time = max(np.amax(pre_spike_times), np.amax(post_spike_times)) + 5 * delay - - nest.set_verbosity("M_ALL") - nest.ResetKernel() - - # load the generated modules into NEST - try: - nest.Install("nestml_jit_module") - except Exception: - # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions - pass - - try: - nest.Install("nestml_non_jit_module") - except Exception: - # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions - pass - - print("Pre spike times: " + str(pre_spike_times)) - print("Post spike times: " + str(post_spike_times)) - - # nest.set_verbosity("M_WARNING") - nest.set_verbosity("M_ERROR") - - nest.SetKernelStatus({"resolution": resolution}) - - wr = nest.Create("weight_recorder") - wr_ref = nest.Create("weight_recorder") - nest.CopyModel(synapse_model_name, "stdp_nestml_rec", - {"weight_recorder": wr[0], "w": 1., "d": 1., "receptor_type": 0}) - 
nest.CopyModel(ref_synapse_model_name, "stdp_ref_rec", - {"weight_recorder": wr_ref[0], "weight": 1., "delay": 1., "receptor_type": 0}) - - # create spike_generators with these times - pre_sg = nest.Create("spike_generator", - params={"spike_times": pre_spike_times, - "allow_offgrid_times": True}) - post_sg = nest.Create("spike_generator", - params={"spike_times": post_spike_times, - "allow_offgrid_times": True}) - - # create parrot neurons and connect spike_generators - if sim_mdl: - pre_neuron = nest.Create("parrot_neuron") - post_neuron = nest.Create(neuron_model_name) - nest.SetStatus(post_neuron, "tau_syn_exc", .2) # [ms] -- very brief - - if sim_ref: - pre_neuron_ref = nest.Create("parrot_neuron") - post_neuron_ref = nest.Create(ref_neuron_model_name) - nest.SetStatus(post_neuron_ref, "tau_syn_exc", .2) # [ms] -- very brief - - if sim_mdl: - if NESTTools.detect_nest_version().startswith("v2"): - spikedet_pre = nest.Create("spike_detector") - spikedet_post = nest.Create("spike_detector") - else: - spikedet_pre = nest.Create("spike_recorder") - spikedet_post = nest.Create("spike_recorder") - mm = nest.Create("multimeter", params={"record_from": [ - "V_m", "post_trace__for_stdp_synapse_nestml"]}) - if sim_ref: - if NESTTools.detect_nest_version().startswith("v2"): - spikedet_pre_ref = nest.Create("spike_detector") - spikedet_post_ref = nest.Create("spike_detector") - else: - spikedet_pre_ref = nest.Create("spike_recorder") - spikedet_post_ref = nest.Create("spike_recorder") - mm_ref = nest.Create("multimeter", params={"record_from": ["V_m"]}) - - if sim_mdl: - nest.Connect(pre_sg, pre_neuron, "one_to_one", syn_spec={"delay": 1.}) - nest.Connect(post_sg, post_neuron, "one_to_one", syn_spec={"delay": 1., "weight": 99999.}) - if NESTTools.detect_nest_version().startswith("v2"): - nest.Connect(pre_neuron, post_neuron, "all_to_all", syn_spec={"model": "stdp_nestml_rec"}) - else: - nest.Connect(pre_neuron, post_neuron, "all_to_all", syn_spec={"synapse_model": "stdp_nestml_rec"}) - nest.Connect(mm, post_neuron) - nest.Connect(pre_neuron, spikedet_pre) - nest.Connect(post_neuron, spikedet_post) - if sim_ref: - nest.Connect(pre_sg, pre_neuron_ref, "one_to_one", syn_spec={"delay": 1.}) - nest.Connect(post_sg, post_neuron_ref, "one_to_one", syn_spec={"delay": 1., "weight": 99999.}) - if NESTTools.detect_nest_version().startswith("v2"): - nest.Connect(pre_neuron_ref, post_neuron_ref, "all_to_all", - syn_spec={"model": ref_synapse_model_name}) - else: - nest.Connect(pre_neuron_ref, post_neuron_ref, "all_to_all", - syn_spec={"synapse_model": ref_synapse_model_name}) - nest.Connect(mm_ref, post_neuron_ref) - nest.Connect(pre_neuron_ref, spikedet_pre_ref) - nest.Connect(post_neuron_ref, spikedet_post_ref) - - # get STDP synapse and weight before protocol - if sim_mdl: - syn = nest.GetConnections(source=pre_neuron, synapse_model="stdp_nestml_rec") - if sim_ref: - syn_ref = nest.GetConnections(source=pre_neuron_ref, synapse_model=ref_synapse_model_name) - - n_steps = int(np.ceil(sim_time / resolution)) + 1 - t = 0. 
- t_hist = [] - if sim_mdl: - w_hist = [] - if sim_ref: - w_hist_ref = [] - while t <= sim_time: - nest.Simulate(resolution) - t += resolution - t_hist.append(t) - if sim_ref: - w_hist_ref.append(nest.GetStatus(syn_ref)[0]["weight"]) - if sim_mdl: - w_hist.append(nest.GetStatus(syn)[0]["w"]) - - # plot - if TEST_PLOTS: - fig, ax = plt.subplots(nrows=2) - ax1, ax2 = ax - - if sim_mdl: - timevec = nest.GetStatus(mm, "events")[0]["times"] - V_m = nest.GetStatus(mm, "events")[0]["V_m"] - ax2.plot(timevec, nest.GetStatus(mm, "events")[0]["post_trace__for_stdp_synapse_nestml"], label="post_tr nestml") - ax1.plot(timevec, V_m, label="nestml", alpha=.7, linestyle=":") - if sim_ref: - pre_ref_spike_times_ = nest.GetStatus(spikedet_pre_ref, "events")[0]["times"] - timevec = nest.GetStatus(mm_ref, "events")[0]["times"] - V_m = nest.GetStatus(mm_ref, "events")[0]["V_m"] - ax1.plot(timevec, V_m, label="nest ref", alpha=.7) - ax1.set_ylabel("V_m") - - for _ax in ax: - _ax.grid(which="major", axis="both") - _ax.grid(which="minor", axis="x", linestyle=":", alpha=.4) - # _ax.minorticks_on() - _ax.set_xlim(0., sim_time) - _ax.legend() - fig.savefig("/tmp/stdp_synapse_test" + fname_snip + "_V_m.png", dpi=300) - - # plot - if TEST_PLOTS: - fig, ax = plt.subplots(nrows=3) - ax1, ax2, ax3 = ax - - if sim_mdl: - pre_spike_times_ = nest.GetStatus(spikedet_pre, "events")[0]["times"] - print("Actual pre spike times: " + str(pre_spike_times_)) - if sim_ref: - pre_ref_spike_times_ = nest.GetStatus(spikedet_pre_ref, "events")[0]["times"] - print("Actual pre ref spike times: " + str(pre_ref_spike_times_)) - - if sim_mdl: - n_spikes = len(pre_spike_times_) - for i in range(n_spikes): - if i == 0: - _lbl = "nestml" - else: - _lbl = None - ax1.plot(2 * [pre_spike_times_[i] + delay], [0, 1], linewidth=2, color="blue", alpha=.4, label=_lbl) - - if sim_mdl: - post_spike_times_ = nest.GetStatus(spikedet_post, "events")[0]["times"] - print("Actual post spike times: " + str(post_spike_times_)) - if sim_ref: - post_ref_spike_times_ = nest.GetStatus(spikedet_post_ref, "events")[0]["times"] - print("Actual post ref spike times: " + str(post_ref_spike_times_)) - - if sim_ref: - n_spikes = len(pre_ref_spike_times_) - for i in range(n_spikes): - if i == 0: - _lbl = "nest ref" - else: - _lbl = None - ax1.plot(2 * [pre_ref_spike_times_[i] + delay], [0, 1], - linewidth=2, color="cyan", label=_lbl, alpha=.4) - ax1.set_ylabel("Pre spikes") - - if sim_mdl: - n_spikes = len(post_spike_times_) - for i in range(n_spikes): - if i == 0: - _lbl = "nestml" - else: - _lbl = None - ax2.plot(2 * [post_spike_times_[i]], [0, 1], linewidth=2, color="black", alpha=.4, label=_lbl) - if sim_ref: - n_spikes = len(post_ref_spike_times_) - for i in range(n_spikes): - if i == 0: - _lbl = "nest ref" - else: - _lbl = None - ax2.plot(2 * [post_ref_spike_times_[i]], [0, 1], linewidth=2, color="red", alpha=.4, label=_lbl) - ax2.plot(timevec, nest.GetStatus(mm, "events")[0]["post_trace__for_stdp_synapse_nestml"], label="nestml post tr") - ax2.set_ylabel("Post spikes") - - if sim_mdl: - ax3.plot(t_hist, w_hist, marker="o", label="nestml") - if sim_ref: - ax3.plot(t_hist, w_hist_ref, linestyle="--", marker="x", label="ref") - - ax3.set_xlabel("Time [ms]") - ax3.set_ylabel("w") - for _ax in ax: - _ax.grid(which="major", axis="both") - _ax.xaxis.set_major_locator(matplotlib.ticker.FixedLocator(np.arange(0, np.ceil(sim_time)))) - _ax.set_xlim(0., sim_time) - _ax.legend() - fig.savefig("/tmp/stdp_synapse_test" + fname_snip + ".png", dpi=300) + codegen_opts=codegen_opts) 
+ def test_foo(self): + nest.Install("nestmlmodule") + n = nest.Create("iaf_psc_exp_neuron_nestml__with_stp_synapse_nestml", 2) + nest.Connect(n[0], n[1], syn_spec={"synapse_model": "stp_synapse_nestml__with_iaf_psc_exp_neuron_nestml"}) + nest.Simulate(100) - # verify - MAX_ABS_ERROR = 1E-6 - assert np.any(np.abs(np.array(w_hist) - 1) > MAX_ABS_ERROR), "No change in the weight!" - assert np.all(np.abs(np.array(w_hist) - np.array(w_hist_ref)) < MAX_ABS_ERROR), \ - "Difference between NESTML model and reference model!" From 6321a7f674dbb2dd8717a00a27055cee63ba5d65 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Mon, 23 Sep 2024 16:29:47 +0200 Subject: [PATCH 2/3] allow vector index in onReceive blocks --- .../point_neuron/common/NeuronClass.jinja2 | 2 +- .../point_neuron/common/NeuronHeader.jinja2 | 2 +- pynestml/generated/PyNestMLParser.py | 593 +++++++++--------- pynestml/grammars/PyNestMLParser.g4 | 2 +- pynestml/meta_model/ast_node_factory.py | 4 +- pynestml/meta_model/ast_on_receive_block.py | 18 +- pynestml/visitors/ast_builder_visitor.py | 8 +- ...onreceive_vector_input_ports_neuron.nestml | 30 + .../test_onreceive_vector_input_ports.py | 109 ++++ 9 files changed, 477 insertions(+), 291 deletions(-) create mode 100644 tests/nest_tests/resources/onreceive_vector_input_ports_neuron.nestml create mode 100644 tests/nest_tests/test_onreceive_vector_input_ports.py diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 0051588de..e3ff1258f 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -1014,7 +1014,7 @@ void {{ neuronName }}::handle(nest::CurrentEvent& e) {%- for blk in neuron.get_on_receive_blocks() %} {%- set ast = blk.get_block() %} void -{{ neuronName }}::on_receive_block_{{ blk.get_port_name() }}() +{{ neuronName }}::on_receive_block_{{ blk.get_port_name() }}{% if blk.has_port_index() %}_VEC_{{ blk.get_port_index() }}{% endif %} () { {%- filter indent(2, True) -%} {%- include "directives_cpp/Block.jinja2" %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 232ee844b..25eb2104f 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -409,7 +409,7 @@ public: {% filter indent(2, True) -%} {%- for blk in neuron.get_on_receive_blocks() %} - void on_receive_block_{{ blk.get_port_name() }}(); + void on_receive_block_{{ blk.get_port_name() }}{% if blk.has_port_index() %}_VEC_{{ blk.get_port_index() }}{% endif %}(); {%- endfor %} {%- endfilter %} diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index ecb08158a..c1a55214c 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,90,598,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,90,604,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -47,189 +47,191 @@ def serializedATN(): 
29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31,432,8,31,11,31,12, 31,433,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1, 33,1,33,1,33,1,33,1,33,1,33,4,33,453,8,33,11,33,12,33,454,1,33,1, - 33,1,34,1,34,1,34,1,34,1,34,5,34,464,8,34,10,34,12,34,467,9,34,1, - 34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,478,8,35,10,35,12, - 35,481,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,492, - 8,36,11,36,12,36,493,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,38, - 1,38,1,38,1,38,1,38,4,38,509,8,38,11,38,12,38,510,1,38,1,38,1,39, - 1,39,1,39,1,39,1,39,1,39,4,39,521,8,39,11,39,12,39,522,1,39,1,39, - 1,40,1,40,1,40,1,40,1,40,3,40,532,8,40,1,40,1,40,5,40,536,8,40,10, - 40,12,40,539,9,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,549, - 8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,3,42,558,8,42,1,43,1,43, - 1,43,1,43,1,43,1,43,3,43,566,8,43,1,43,1,43,1,43,1,44,1,44,1,44, - 1,44,1,44,1,44,5,44,577,8,44,10,44,12,44,580,9,44,3,44,582,8,44, - 1,44,1,44,3,44,586,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,46,1,46, - 1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, - 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, - 74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75,1,0,89,90,1,0, - 32,34,3,0,25,25,86,87,89,90,653,0,100,1,0,0,0,2,111,1,0,0,0,4,128, - 1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0,12,205,1,0,0, - 0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,233,1,0,0,0,22, - 248,1,0,0,0,24,266,1,0,0,0,26,280,1,0,0,0,28,299,1,0,0,0,30,310, - 1,0,0,0,32,315,1,0,0,0,34,321,1,0,0,0,36,325,1,0,0,0,38,336,1,0, - 0,0,40,366,1,0,0,0,42,376,1,0,0,0,44,378,1,0,0,0,46,380,1,0,0,0, - 48,382,1,0,0,0,50,386,1,0,0,0,52,396,1,0,0,0,54,401,1,0,0,0,56,406, - 1,0,0,0,58,410,1,0,0,0,60,424,1,0,0,0,62,431,1,0,0,0,64,437,1,0, - 0,0,66,441,1,0,0,0,68,458,1,0,0,0,70,472,1,0,0,0,72,486,1,0,0,0, - 74,497,1,0,0,0,76,501,1,0,0,0,78,514,1,0,0,0,80,526,1,0,0,0,82,543, - 1,0,0,0,84,557,1,0,0,0,86,559,1,0,0,0,88,570,1,0,0,0,90,590,1,0, - 0,0,92,593,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96,101,5,12,0, - 0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100,94,1,0,0,0, - 100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1,0,0,0,100, - 99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49,0,0,104,105, - 3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,89,0,0,108,109, - 5,79,0,0,109,112,3,2,1,2,110,112,5,88,0,0,111,102,1,0,0,0,111,107, - 1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10,3,0,0,114,117, - 5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115,1,0,0,0,117,118, - 1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121,5,78,0,0,121,123, - 3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126,1,0,0,0,124,122, - 1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1,0,0,0,127,129,7, - 0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1,0,0,0,130,131,5, - 89,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5,49,0,0,134,135,3, - 6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138,3,10,5,0,138,139, - 3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144,3,6,3,4,142,144, - 3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140,1,0,0,0,143,142, - 1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147,5,78,0,0,147,180, - 3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150,153,5,79,0,0,151, - 153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152,151,1,0,0,0,153, - 154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156,159,5,51,0,0,157, - 159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159,160,1,0,0,0,160, - 180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163,164,3,6,3,7,164, - 180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167,168,3,6,3,6,168, - 
180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171,172,3,6,3,4,172, - 180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175,176,3,6,3,0,176, - 177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179,145,1,0,0,0,179, - 148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179,165,1,0,0,0,179, - 169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181,179,1,0,0,0,181, - 182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194,3,20,10,0,185, - 194,5,86,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188,187,1,0,0,0,188, - 189,1,0,0,0,189,194,1,0,0,0,190,194,5,87,0,0,191,194,5,25,0,0,192, - 194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193,186,1,0,0,0,193, - 190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194,9,1,0,0,0,195,199, - 5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198,195,1,0,0,0,198,196, - 1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206,5,55,0,0,201,206, - 5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204,206,5,62,0,0,205, - 200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205,203,1,0,0,0,205, - 204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208,215,5,65,0,0,209, - 215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0,212,215,5,73,0,0, - 213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0,0,214,209,1,0,0,0, - 214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0,0,214,213,1,0,0,0, - 215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0,0,218,216,1,0,0,0, - 218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,88,0,0,221,222,5,56,0,0, - 222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0,0,0,225,221,1,0,0,0, - 225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85,0,0,228,227,1,0,0,0, - 229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0,0,0,231,19,1,0,0,0,232, - 230,1,0,0,0,233,234,5,88,0,0,234,243,5,49,0,0,235,240,3,6,3,0,236, - 237,5,74,0,0,237,239,3,6,3,0,238,236,1,0,0,0,239,242,1,0,0,0,240, - 238,1,0,0,0,240,241,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,243, - 235,1,0,0,0,243,244,1,0,0,0,244,245,1,0,0,0,245,246,5,50,0,0,246, - 21,1,0,0,0,247,249,5,29,0,0,248,247,1,0,0,0,248,249,1,0,0,0,249, - 250,1,0,0,0,250,251,5,16,0,0,251,252,5,88,0,0,252,253,3,0,0,0,253, - 254,5,76,0,0,254,256,3,6,3,0,255,257,5,84,0,0,256,255,1,0,0,0,256, - 257,1,0,0,0,257,261,1,0,0,0,258,260,3,42,21,0,259,258,1,0,0,0,260, - 263,1,0,0,0,261,259,1,0,0,0,261,262,1,0,0,0,262,264,1,0,0,0,263, - 261,1,0,0,0,264,265,5,9,0,0,265,23,1,0,0,0,266,267,3,18,9,0,267, - 268,5,76,0,0,268,270,3,6,3,0,269,271,5,84,0,0,270,269,1,0,0,0,270, - 271,1,0,0,0,271,275,1,0,0,0,272,274,3,42,21,0,273,272,1,0,0,0,274, - 277,1,0,0,0,275,273,1,0,0,0,275,276,1,0,0,0,276,278,1,0,0,0,277, - 275,1,0,0,0,278,279,5,9,0,0,279,25,1,0,0,0,280,281,5,30,0,0,281, - 282,3,18,9,0,282,283,5,76,0,0,283,291,3,6,3,0,284,285,5,4,0,0,285, - 286,3,18,9,0,286,287,5,76,0,0,287,288,3,6,3,0,288,290,1,0,0,0,289, - 284,1,0,0,0,290,293,1,0,0,0,291,289,1,0,0,0,291,292,1,0,0,0,292, - 295,1,0,0,0,293,291,1,0,0,0,294,296,5,84,0,0,295,294,1,0,0,0,295, - 296,1,0,0,0,296,297,1,0,0,0,297,298,5,9,0,0,298,27,1,0,0,0,299,300, - 5,9,0,0,300,302,5,1,0,0,301,303,3,30,15,0,302,301,1,0,0,0,303,304, - 1,0,0,0,304,302,1,0,0,0,304,305,1,0,0,0,305,306,1,0,0,0,306,307, - 5,2,0,0,307,29,1,0,0,0,308,311,3,34,17,0,309,311,3,32,16,0,310,308, - 1,0,0,0,310,309,1,0,0,0,311,31,1,0,0,0,312,316,3,50,25,0,313,316, - 3,58,29,0,314,316,3,60,30,0,315,312,1,0,0,0,315,313,1,0,0,0,315, - 314,1,0,0,0,316,33,1,0,0,0,317,322,3,36,18,0,318,322,3,20,10,0,319, - 322,3,38,19,0,320,322,3,48,24,0,321,317,1,0,0,0,321,318,1,0,0,0, - 321,319,1,0,0,0,321,320,1,0,0,0,322,323,1,0,0,0,323,324,5,9,0,0, - 324,35,1,0,0,0,325,331,3,18,9,0,326,332,5,76,0,0,327,332,5,66,0, - 0,328,332,5,67,0,0,329,332,5,68,0,0,330,332,5,69,0,0,331,326,1,0, - 
0,0,331,327,1,0,0,0,331,328,1,0,0,0,331,329,1,0,0,0,331,330,1,0, - 0,0,332,333,1,0,0,0,333,334,3,6,3,0,334,37,1,0,0,0,335,337,5,29, - 0,0,336,335,1,0,0,0,336,337,1,0,0,0,337,339,1,0,0,0,338,340,5,16, - 0,0,339,338,1,0,0,0,339,340,1,0,0,0,340,341,1,0,0,0,341,346,3,18, - 9,0,342,343,5,74,0,0,343,345,3,18,9,0,344,342,1,0,0,0,345,348,1, - 0,0,0,346,344,1,0,0,0,346,347,1,0,0,0,347,349,1,0,0,0,348,346,1, - 0,0,0,349,352,3,0,0,0,350,351,5,76,0,0,351,353,3,6,3,0,352,350,1, - 0,0,0,352,353,1,0,0,0,353,358,1,0,0,0,354,355,5,59,0,0,355,356,3, - 6,3,0,356,357,5,60,0,0,357,359,1,0,0,0,358,354,1,0,0,0,358,359,1, - 0,0,0,359,363,1,0,0,0,360,362,3,42,21,0,361,360,1,0,0,0,362,365, - 1,0,0,0,363,361,1,0,0,0,363,364,1,0,0,0,364,39,1,0,0,0,365,363,1, - 0,0,0,366,367,3,38,19,0,367,368,5,9,0,0,368,41,1,0,0,0,369,377,5, - 45,0,0,370,377,5,46,0,0,371,372,5,47,0,0,372,373,3,44,22,0,373,374, - 5,83,0,0,374,375,3,46,23,0,375,377,1,0,0,0,376,369,1,0,0,0,376,370, - 1,0,0,0,376,371,1,0,0,0,377,43,1,0,0,0,378,379,5,88,0,0,379,45,1, - 0,0,0,380,381,5,88,0,0,381,47,1,0,0,0,382,384,5,17,0,0,383,385,3, - 6,3,0,384,383,1,0,0,0,384,385,1,0,0,0,385,49,1,0,0,0,386,390,3,52, - 26,0,387,389,3,54,27,0,388,387,1,0,0,0,389,392,1,0,0,0,390,388,1, - 0,0,0,390,391,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,393,395,3, - 56,28,0,394,393,1,0,0,0,394,395,1,0,0,0,395,51,1,0,0,0,396,397,5, - 18,0,0,397,398,3,6,3,0,398,399,5,82,0,0,399,400,3,28,14,0,400,53, - 1,0,0,0,401,402,5,19,0,0,402,403,3,6,3,0,403,404,5,82,0,0,404,405, - 3,28,14,0,405,55,1,0,0,0,406,407,5,20,0,0,407,408,5,82,0,0,408,409, - 3,28,14,0,409,57,1,0,0,0,410,411,5,21,0,0,411,412,5,88,0,0,412,413, - 5,23,0,0,413,414,3,6,3,0,414,415,5,48,0,0,415,416,3,6,3,0,416,418, - 5,24,0,0,417,419,5,75,0,0,418,417,1,0,0,0,418,419,1,0,0,0,419,420, - 1,0,0,0,420,421,7,1,0,0,421,422,5,82,0,0,422,423,3,28,14,0,423,59, - 1,0,0,0,424,425,5,22,0,0,425,426,3,6,3,0,426,427,5,82,0,0,427,428, - 3,28,14,0,428,61,1,0,0,0,429,432,3,64,32,0,430,432,5,9,0,0,431,429, - 1,0,0,0,431,430,1,0,0,0,432,433,1,0,0,0,433,431,1,0,0,0,433,434, - 1,0,0,0,434,435,1,0,0,0,435,436,5,0,0,1,436,63,1,0,0,0,437,438,5, - 31,0,0,438,439,5,88,0,0,439,440,3,66,33,0,440,65,1,0,0,0,441,442, - 5,82,0,0,442,443,5,9,0,0,443,452,5,1,0,0,444,453,3,72,36,0,445,453, - 3,76,38,0,446,453,3,78,39,0,447,453,3,86,43,0,448,453,3,88,44,0, - 449,453,3,68,34,0,450,453,3,70,35,0,451,453,3,74,37,0,452,444,1, - 0,0,0,452,445,1,0,0,0,452,446,1,0,0,0,452,447,1,0,0,0,452,448,1, - 0,0,0,452,449,1,0,0,0,452,450,1,0,0,0,452,451,1,0,0,0,453,454,1, - 0,0,0,454,452,1,0,0,0,454,455,1,0,0,0,455,456,1,0,0,0,456,457,5, - 2,0,0,457,67,1,0,0,0,458,459,5,40,0,0,459,460,5,49,0,0,460,465,5, - 88,0,0,461,462,5,74,0,0,462,464,3,92,46,0,463,461,1,0,0,0,464,467, - 1,0,0,0,465,463,1,0,0,0,465,466,1,0,0,0,466,468,1,0,0,0,467,465, - 1,0,0,0,468,469,5,50,0,0,469,470,5,82,0,0,470,471,3,28,14,0,471, - 69,1,0,0,0,472,473,5,41,0,0,473,474,5,49,0,0,474,479,3,6,3,0,475, - 476,5,74,0,0,476,478,3,92,46,0,477,475,1,0,0,0,478,481,1,0,0,0,479, - 477,1,0,0,0,479,480,1,0,0,0,480,482,1,0,0,0,481,479,1,0,0,0,482, - 483,5,50,0,0,483,484,5,82,0,0,484,485,3,28,14,0,485,71,1,0,0,0,486, - 487,7,2,0,0,487,488,5,82,0,0,488,489,5,9,0,0,489,491,5,1,0,0,490, - 492,3,40,20,0,491,490,1,0,0,0,492,493,1,0,0,0,493,491,1,0,0,0,493, - 494,1,0,0,0,494,495,1,0,0,0,495,496,5,2,0,0,496,73,1,0,0,0,497,498, - 5,35,0,0,498,499,5,82,0,0,499,500,3,28,14,0,500,75,1,0,0,0,501,502, - 5,36,0,0,502,503,5,82,0,0,503,504,5,9,0,0,504,508,5,1,0,0,505,509, - 3,22,11,0,506,509,3,24,12,0,507,509,3,26,13,0,508,505,1,0,0,0,508, - 
506,1,0,0,0,508,507,1,0,0,0,509,510,1,0,0,0,510,508,1,0,0,0,510, - 511,1,0,0,0,511,512,1,0,0,0,512,513,5,2,0,0,513,77,1,0,0,0,514,515, - 5,37,0,0,515,516,5,82,0,0,516,517,5,9,0,0,517,520,5,1,0,0,518,521, - 3,80,40,0,519,521,3,82,41,0,520,518,1,0,0,0,520,519,1,0,0,0,521, - 522,1,0,0,0,522,520,1,0,0,0,522,523,1,0,0,0,523,524,1,0,0,0,524, - 525,5,2,0,0,525,79,1,0,0,0,526,531,5,88,0,0,527,528,5,56,0,0,528, - 529,3,6,3,0,529,530,5,58,0,0,530,532,1,0,0,0,531,527,1,0,0,0,531, - 532,1,0,0,0,532,533,1,0,0,0,533,537,5,57,0,0,534,536,3,84,42,0,535, - 534,1,0,0,0,536,539,1,0,0,0,537,535,1,0,0,0,537,538,1,0,0,0,538, - 540,1,0,0,0,539,537,1,0,0,0,540,541,5,42,0,0,541,542,5,9,0,0,542, - 81,1,0,0,0,543,548,5,88,0,0,544,545,5,56,0,0,545,546,3,6,3,0,546, - 547,5,58,0,0,547,549,1,0,0,0,548,544,1,0,0,0,548,549,1,0,0,0,549, - 550,1,0,0,0,550,551,3,0,0,0,551,552,5,57,0,0,552,553,5,39,0,0,553, - 554,5,9,0,0,554,83,1,0,0,0,555,558,5,43,0,0,556,558,5,44,0,0,557, - 555,1,0,0,0,557,556,1,0,0,0,558,85,1,0,0,0,559,560,5,38,0,0,560, - 561,5,82,0,0,561,562,5,9,0,0,562,565,5,1,0,0,563,566,5,42,0,0,564, - 566,5,39,0,0,565,563,1,0,0,0,565,564,1,0,0,0,566,567,1,0,0,0,567, - 568,5,9,0,0,568,569,5,2,0,0,569,87,1,0,0,0,570,571,5,15,0,0,571, - 572,5,88,0,0,572,581,5,49,0,0,573,578,3,90,45,0,574,575,5,74,0,0, - 575,577,3,90,45,0,576,574,1,0,0,0,577,580,1,0,0,0,578,576,1,0,0, - 0,578,579,1,0,0,0,579,582,1,0,0,0,580,578,1,0,0,0,581,573,1,0,0, - 0,581,582,1,0,0,0,582,583,1,0,0,0,583,585,5,50,0,0,584,586,3,0,0, - 0,585,584,1,0,0,0,585,586,1,0,0,0,586,587,1,0,0,0,587,588,5,82,0, - 0,588,589,3,28,14,0,589,89,1,0,0,0,590,591,5,88,0,0,591,592,3,0, - 0,0,592,91,1,0,0,0,593,594,5,88,0,0,594,595,5,76,0,0,595,596,7,3, - 0,0,596,93,1,0,0,0,63,100,111,116,122,124,128,143,152,158,179,181, - 188,193,198,205,214,218,225,230,240,243,248,256,261,270,275,291, - 295,304,310,315,321,331,336,339,346,352,358,363,376,384,390,394, - 418,431,433,452,454,465,479,493,508,510,520,522,531,537,548,557, - 565,578,581,585 + 33,1,34,1,34,1,34,1,34,1,34,1,34,1,34,3,34,466,8,34,1,34,1,34,5, + 34,470,8,34,10,34,12,34,473,9,34,1,34,1,34,1,34,1,34,1,35,1,35,1, + 35,1,35,1,35,5,35,484,8,35,10,35,12,35,487,9,35,1,35,1,35,1,35,1, + 35,1,36,1,36,1,36,1,36,1,36,4,36,498,8,36,11,36,12,36,499,1,36,1, + 36,1,37,1,37,1,37,1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,515, + 8,38,11,38,12,38,516,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,4,39, + 527,8,39,11,39,12,39,528,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3,40, + 538,8,40,1,40,1,40,5,40,542,8,40,10,40,12,40,545,9,40,1,40,1,40, + 1,40,1,41,1,41,1,41,1,41,1,41,3,41,555,8,41,1,41,1,41,1,41,1,41, + 1,41,1,42,1,42,3,42,564,8,42,1,43,1,43,1,43,1,43,1,43,1,43,3,43, + 572,8,43,1,43,1,43,1,43,1,44,1,44,1,44,1,44,1,44,1,44,5,44,583,8, + 44,10,44,12,44,586,9,44,3,44,588,8,44,1,44,1,44,3,44,592,8,44,1, + 44,1,44,1,44,1,45,1,45,1,45,1,46,1,46,1,46,1,46,1,46,0,2,2,6,47, + 0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44, + 46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76,78,80,82,84,86,88, + 90,92,0,4,2,0,51,51,75,75,1,0,89,90,1,0,32,34,3,0,25,25,86,87,89, + 90,660,0,100,1,0,0,0,2,111,1,0,0,0,4,128,1,0,0,0,6,143,1,0,0,0,8, + 193,1,0,0,0,10,198,1,0,0,0,12,205,1,0,0,0,14,214,1,0,0,0,16,218, + 1,0,0,0,18,220,1,0,0,0,20,233,1,0,0,0,22,248,1,0,0,0,24,266,1,0, + 0,0,26,280,1,0,0,0,28,299,1,0,0,0,30,310,1,0,0,0,32,315,1,0,0,0, + 34,321,1,0,0,0,36,325,1,0,0,0,38,336,1,0,0,0,40,366,1,0,0,0,42,376, + 1,0,0,0,44,378,1,0,0,0,46,380,1,0,0,0,48,382,1,0,0,0,50,386,1,0, + 0,0,52,396,1,0,0,0,54,401,1,0,0,0,56,406,1,0,0,0,58,410,1,0,0,0, + 
60,424,1,0,0,0,62,431,1,0,0,0,64,437,1,0,0,0,66,441,1,0,0,0,68,458, + 1,0,0,0,70,478,1,0,0,0,72,492,1,0,0,0,74,503,1,0,0,0,76,507,1,0, + 0,0,78,520,1,0,0,0,80,532,1,0,0,0,82,549,1,0,0,0,84,563,1,0,0,0, + 86,565,1,0,0,0,88,576,1,0,0,0,90,596,1,0,0,0,92,599,1,0,0,0,94,101, + 5,10,0,0,95,101,5,11,0,0,96,101,5,12,0,0,97,101,5,13,0,0,98,101, + 5,14,0,0,99,101,3,2,1,0,100,94,1,0,0,0,100,95,1,0,0,0,100,96,1,0, + 0,0,100,97,1,0,0,0,100,98,1,0,0,0,100,99,1,0,0,0,101,1,1,0,0,0,102, + 103,6,1,-1,0,103,104,5,49,0,0,104,105,3,2,1,0,105,106,5,50,0,0,106, + 112,1,0,0,0,107,108,5,89,0,0,108,109,5,79,0,0,109,112,3,2,1,2,110, + 112,5,88,0,0,111,102,1,0,0,0,111,107,1,0,0,0,111,110,1,0,0,0,112, + 124,1,0,0,0,113,116,10,3,0,0,114,117,5,77,0,0,115,117,5,79,0,0,116, + 114,1,0,0,0,116,115,1,0,0,0,117,118,1,0,0,0,118,123,3,2,1,4,119, + 120,10,4,0,0,120,121,5,78,0,0,121,123,3,4,2,0,122,113,1,0,0,0,122, + 119,1,0,0,0,123,126,1,0,0,0,124,122,1,0,0,0,124,125,1,0,0,0,125, + 3,1,0,0,0,126,124,1,0,0,0,127,129,7,0,0,0,128,127,1,0,0,0,128,129, + 1,0,0,0,129,130,1,0,0,0,130,131,5,89,0,0,131,5,1,0,0,0,132,133,6, + 3,-1,0,133,134,5,49,0,0,134,135,3,6,3,0,135,136,5,50,0,0,136,144, + 1,0,0,0,137,138,3,10,5,0,138,139,3,6,3,9,139,144,1,0,0,0,140,141, + 5,28,0,0,141,144,3,6,3,4,142,144,3,8,4,0,143,132,1,0,0,0,143,137, + 1,0,0,0,143,140,1,0,0,0,143,142,1,0,0,0,144,181,1,0,0,0,145,146, + 10,10,0,0,146,147,5,78,0,0,147,180,3,6,3,10,148,152,10,8,0,0,149, + 153,5,77,0,0,150,153,5,79,0,0,151,153,5,80,0,0,152,149,1,0,0,0,152, + 150,1,0,0,0,152,151,1,0,0,0,153,154,1,0,0,0,154,180,3,6,3,9,155, + 158,10,7,0,0,156,159,5,51,0,0,157,159,5,75,0,0,158,156,1,0,0,0,158, + 157,1,0,0,0,159,160,1,0,0,0,160,180,3,6,3,8,161,162,10,6,0,0,162, + 163,3,12,6,0,163,164,3,6,3,7,164,180,1,0,0,0,165,166,10,5,0,0,166, + 167,3,14,7,0,167,168,3,6,3,6,168,180,1,0,0,0,169,170,10,3,0,0,170, + 171,3,16,8,0,171,172,3,6,3,4,172,180,1,0,0,0,173,174,10,2,0,0,174, + 175,5,81,0,0,175,176,3,6,3,0,176,177,5,82,0,0,177,178,3,6,3,3,178, + 180,1,0,0,0,179,145,1,0,0,0,179,148,1,0,0,0,179,155,1,0,0,0,179, + 161,1,0,0,0,179,165,1,0,0,0,179,169,1,0,0,0,179,173,1,0,0,0,180, + 183,1,0,0,0,181,179,1,0,0,0,181,182,1,0,0,0,182,7,1,0,0,0,183,181, + 1,0,0,0,184,194,3,20,10,0,185,194,5,86,0,0,186,188,7,1,0,0,187,189, + 3,18,9,0,188,187,1,0,0,0,188,189,1,0,0,0,189,194,1,0,0,0,190,194, + 5,87,0,0,191,194,5,25,0,0,192,194,3,18,9,0,193,184,1,0,0,0,193,185, + 1,0,0,0,193,186,1,0,0,0,193,190,1,0,0,0,193,191,1,0,0,0,193,192, + 1,0,0,0,194,9,1,0,0,0,195,199,5,51,0,0,196,199,5,75,0,0,197,199, + 5,52,0,0,198,195,1,0,0,0,198,196,1,0,0,0,198,197,1,0,0,0,199,11, + 1,0,0,0,200,206,5,55,0,0,201,206,5,54,0,0,202,206,5,53,0,0,203,206, + 5,61,0,0,204,206,5,62,0,0,205,200,1,0,0,0,205,201,1,0,0,0,205,202, + 1,0,0,0,205,203,1,0,0,0,205,204,1,0,0,0,206,13,1,0,0,0,207,215,5, + 63,0,0,208,215,5,65,0,0,209,215,5,70,0,0,210,215,5,71,0,0,211,215, + 5,72,0,0,212,215,5,73,0,0,213,215,5,64,0,0,214,207,1,0,0,0,214,208, + 1,0,0,0,214,209,1,0,0,0,214,210,1,0,0,0,214,211,1,0,0,0,214,212, + 1,0,0,0,214,213,1,0,0,0,215,15,1,0,0,0,216,219,5,26,0,0,217,219, + 5,27,0,0,218,216,1,0,0,0,218,217,1,0,0,0,219,17,1,0,0,0,220,225, + 5,88,0,0,221,222,5,56,0,0,222,223,3,6,3,0,223,224,5,58,0,0,224,226, + 1,0,0,0,225,221,1,0,0,0,225,226,1,0,0,0,226,230,1,0,0,0,227,229, + 5,85,0,0,228,227,1,0,0,0,229,232,1,0,0,0,230,228,1,0,0,0,230,231, + 1,0,0,0,231,19,1,0,0,0,232,230,1,0,0,0,233,234,5,88,0,0,234,243, + 5,49,0,0,235,240,3,6,3,0,236,237,5,74,0,0,237,239,3,6,3,0,238,236, + 1,0,0,0,239,242,1,0,0,0,240,238,1,0,0,0,240,241,1,0,0,0,241,244, + 
1,0,0,0,242,240,1,0,0,0,243,235,1,0,0,0,243,244,1,0,0,0,244,245, + 1,0,0,0,245,246,5,50,0,0,246,21,1,0,0,0,247,249,5,29,0,0,248,247, + 1,0,0,0,248,249,1,0,0,0,249,250,1,0,0,0,250,251,5,16,0,0,251,252, + 5,88,0,0,252,253,3,0,0,0,253,254,5,76,0,0,254,256,3,6,3,0,255,257, + 5,84,0,0,256,255,1,0,0,0,256,257,1,0,0,0,257,261,1,0,0,0,258,260, + 3,42,21,0,259,258,1,0,0,0,260,263,1,0,0,0,261,259,1,0,0,0,261,262, + 1,0,0,0,262,264,1,0,0,0,263,261,1,0,0,0,264,265,5,9,0,0,265,23,1, + 0,0,0,266,267,3,18,9,0,267,268,5,76,0,0,268,270,3,6,3,0,269,271, + 5,84,0,0,270,269,1,0,0,0,270,271,1,0,0,0,271,275,1,0,0,0,272,274, + 3,42,21,0,273,272,1,0,0,0,274,277,1,0,0,0,275,273,1,0,0,0,275,276, + 1,0,0,0,276,278,1,0,0,0,277,275,1,0,0,0,278,279,5,9,0,0,279,25,1, + 0,0,0,280,281,5,30,0,0,281,282,3,18,9,0,282,283,5,76,0,0,283,291, + 3,6,3,0,284,285,5,4,0,0,285,286,3,18,9,0,286,287,5,76,0,0,287,288, + 3,6,3,0,288,290,1,0,0,0,289,284,1,0,0,0,290,293,1,0,0,0,291,289, + 1,0,0,0,291,292,1,0,0,0,292,295,1,0,0,0,293,291,1,0,0,0,294,296, + 5,84,0,0,295,294,1,0,0,0,295,296,1,0,0,0,296,297,1,0,0,0,297,298, + 5,9,0,0,298,27,1,0,0,0,299,300,5,9,0,0,300,302,5,1,0,0,301,303,3, + 30,15,0,302,301,1,0,0,0,303,304,1,0,0,0,304,302,1,0,0,0,304,305, + 1,0,0,0,305,306,1,0,0,0,306,307,5,2,0,0,307,29,1,0,0,0,308,311,3, + 34,17,0,309,311,3,32,16,0,310,308,1,0,0,0,310,309,1,0,0,0,311,31, + 1,0,0,0,312,316,3,50,25,0,313,316,3,58,29,0,314,316,3,60,30,0,315, + 312,1,0,0,0,315,313,1,0,0,0,315,314,1,0,0,0,316,33,1,0,0,0,317,322, + 3,36,18,0,318,322,3,20,10,0,319,322,3,38,19,0,320,322,3,48,24,0, + 321,317,1,0,0,0,321,318,1,0,0,0,321,319,1,0,0,0,321,320,1,0,0,0, + 322,323,1,0,0,0,323,324,5,9,0,0,324,35,1,0,0,0,325,331,3,18,9,0, + 326,332,5,76,0,0,327,332,5,66,0,0,328,332,5,67,0,0,329,332,5,68, + 0,0,330,332,5,69,0,0,331,326,1,0,0,0,331,327,1,0,0,0,331,328,1,0, + 0,0,331,329,1,0,0,0,331,330,1,0,0,0,332,333,1,0,0,0,333,334,3,6, + 3,0,334,37,1,0,0,0,335,337,5,29,0,0,336,335,1,0,0,0,336,337,1,0, + 0,0,337,339,1,0,0,0,338,340,5,16,0,0,339,338,1,0,0,0,339,340,1,0, + 0,0,340,341,1,0,0,0,341,346,3,18,9,0,342,343,5,74,0,0,343,345,3, + 18,9,0,344,342,1,0,0,0,345,348,1,0,0,0,346,344,1,0,0,0,346,347,1, + 0,0,0,347,349,1,0,0,0,348,346,1,0,0,0,349,352,3,0,0,0,350,351,5, + 76,0,0,351,353,3,6,3,0,352,350,1,0,0,0,352,353,1,0,0,0,353,358,1, + 0,0,0,354,355,5,59,0,0,355,356,3,6,3,0,356,357,5,60,0,0,357,359, + 1,0,0,0,358,354,1,0,0,0,358,359,1,0,0,0,359,363,1,0,0,0,360,362, + 3,42,21,0,361,360,1,0,0,0,362,365,1,0,0,0,363,361,1,0,0,0,363,364, + 1,0,0,0,364,39,1,0,0,0,365,363,1,0,0,0,366,367,3,38,19,0,367,368, + 5,9,0,0,368,41,1,0,0,0,369,377,5,45,0,0,370,377,5,46,0,0,371,372, + 5,47,0,0,372,373,3,44,22,0,373,374,5,83,0,0,374,375,3,46,23,0,375, + 377,1,0,0,0,376,369,1,0,0,0,376,370,1,0,0,0,376,371,1,0,0,0,377, + 43,1,0,0,0,378,379,5,88,0,0,379,45,1,0,0,0,380,381,5,88,0,0,381, + 47,1,0,0,0,382,384,5,17,0,0,383,385,3,6,3,0,384,383,1,0,0,0,384, + 385,1,0,0,0,385,49,1,0,0,0,386,390,3,52,26,0,387,389,3,54,27,0,388, + 387,1,0,0,0,389,392,1,0,0,0,390,388,1,0,0,0,390,391,1,0,0,0,391, + 394,1,0,0,0,392,390,1,0,0,0,393,395,3,56,28,0,394,393,1,0,0,0,394, + 395,1,0,0,0,395,51,1,0,0,0,396,397,5,18,0,0,397,398,3,6,3,0,398, + 399,5,82,0,0,399,400,3,28,14,0,400,53,1,0,0,0,401,402,5,19,0,0,402, + 403,3,6,3,0,403,404,5,82,0,0,404,405,3,28,14,0,405,55,1,0,0,0,406, + 407,5,20,0,0,407,408,5,82,0,0,408,409,3,28,14,0,409,57,1,0,0,0,410, + 411,5,21,0,0,411,412,5,88,0,0,412,413,5,23,0,0,413,414,3,6,3,0,414, + 415,5,48,0,0,415,416,3,6,3,0,416,418,5,24,0,0,417,419,5,75,0,0,418, + 
417,1,0,0,0,418,419,1,0,0,0,419,420,1,0,0,0,420,421,7,1,0,0,421, + 422,5,82,0,0,422,423,3,28,14,0,423,59,1,0,0,0,424,425,5,22,0,0,425, + 426,3,6,3,0,426,427,5,82,0,0,427,428,3,28,14,0,428,61,1,0,0,0,429, + 432,3,64,32,0,430,432,5,9,0,0,431,429,1,0,0,0,431,430,1,0,0,0,432, + 433,1,0,0,0,433,431,1,0,0,0,433,434,1,0,0,0,434,435,1,0,0,0,435, + 436,5,0,0,1,436,63,1,0,0,0,437,438,5,31,0,0,438,439,5,88,0,0,439, + 440,3,66,33,0,440,65,1,0,0,0,441,442,5,82,0,0,442,443,5,9,0,0,443, + 452,5,1,0,0,444,453,3,72,36,0,445,453,3,76,38,0,446,453,3,78,39, + 0,447,453,3,86,43,0,448,453,3,88,44,0,449,453,3,68,34,0,450,453, + 3,70,35,0,451,453,3,74,37,0,452,444,1,0,0,0,452,445,1,0,0,0,452, + 446,1,0,0,0,452,447,1,0,0,0,452,448,1,0,0,0,452,449,1,0,0,0,452, + 450,1,0,0,0,452,451,1,0,0,0,453,454,1,0,0,0,454,452,1,0,0,0,454, + 455,1,0,0,0,455,456,1,0,0,0,456,457,5,2,0,0,457,67,1,0,0,0,458,459, + 5,40,0,0,459,460,5,49,0,0,460,465,5,88,0,0,461,462,5,56,0,0,462, + 463,3,6,3,0,463,464,5,58,0,0,464,466,1,0,0,0,465,461,1,0,0,0,465, + 466,1,0,0,0,466,471,1,0,0,0,467,468,5,74,0,0,468,470,3,92,46,0,469, + 467,1,0,0,0,470,473,1,0,0,0,471,469,1,0,0,0,471,472,1,0,0,0,472, + 474,1,0,0,0,473,471,1,0,0,0,474,475,5,50,0,0,475,476,5,82,0,0,476, + 477,3,28,14,0,477,69,1,0,0,0,478,479,5,41,0,0,479,480,5,49,0,0,480, + 485,3,6,3,0,481,482,5,74,0,0,482,484,3,92,46,0,483,481,1,0,0,0,484, + 487,1,0,0,0,485,483,1,0,0,0,485,486,1,0,0,0,486,488,1,0,0,0,487, + 485,1,0,0,0,488,489,5,50,0,0,489,490,5,82,0,0,490,491,3,28,14,0, + 491,71,1,0,0,0,492,493,7,2,0,0,493,494,5,82,0,0,494,495,5,9,0,0, + 495,497,5,1,0,0,496,498,3,40,20,0,497,496,1,0,0,0,498,499,1,0,0, + 0,499,497,1,0,0,0,499,500,1,0,0,0,500,501,1,0,0,0,501,502,5,2,0, + 0,502,73,1,0,0,0,503,504,5,35,0,0,504,505,5,82,0,0,505,506,3,28, + 14,0,506,75,1,0,0,0,507,508,5,36,0,0,508,509,5,82,0,0,509,510,5, + 9,0,0,510,514,5,1,0,0,511,515,3,22,11,0,512,515,3,24,12,0,513,515, + 3,26,13,0,514,511,1,0,0,0,514,512,1,0,0,0,514,513,1,0,0,0,515,516, + 1,0,0,0,516,514,1,0,0,0,516,517,1,0,0,0,517,518,1,0,0,0,518,519, + 5,2,0,0,519,77,1,0,0,0,520,521,5,37,0,0,521,522,5,82,0,0,522,523, + 5,9,0,0,523,526,5,1,0,0,524,527,3,80,40,0,525,527,3,82,41,0,526, + 524,1,0,0,0,526,525,1,0,0,0,527,528,1,0,0,0,528,526,1,0,0,0,528, + 529,1,0,0,0,529,530,1,0,0,0,530,531,5,2,0,0,531,79,1,0,0,0,532,537, + 5,88,0,0,533,534,5,56,0,0,534,535,3,6,3,0,535,536,5,58,0,0,536,538, + 1,0,0,0,537,533,1,0,0,0,537,538,1,0,0,0,538,539,1,0,0,0,539,543, + 5,57,0,0,540,542,3,84,42,0,541,540,1,0,0,0,542,545,1,0,0,0,543,541, + 1,0,0,0,543,544,1,0,0,0,544,546,1,0,0,0,545,543,1,0,0,0,546,547, + 5,42,0,0,547,548,5,9,0,0,548,81,1,0,0,0,549,554,5,88,0,0,550,551, + 5,56,0,0,551,552,3,6,3,0,552,553,5,58,0,0,553,555,1,0,0,0,554,550, + 1,0,0,0,554,555,1,0,0,0,555,556,1,0,0,0,556,557,3,0,0,0,557,558, + 5,57,0,0,558,559,5,39,0,0,559,560,5,9,0,0,560,83,1,0,0,0,561,564, + 5,43,0,0,562,564,5,44,0,0,563,561,1,0,0,0,563,562,1,0,0,0,564,85, + 1,0,0,0,565,566,5,38,0,0,566,567,5,82,0,0,567,568,5,9,0,0,568,571, + 5,1,0,0,569,572,5,42,0,0,570,572,5,39,0,0,571,569,1,0,0,0,571,570, + 1,0,0,0,572,573,1,0,0,0,573,574,5,9,0,0,574,575,5,2,0,0,575,87,1, + 0,0,0,576,577,5,15,0,0,577,578,5,88,0,0,578,587,5,49,0,0,579,584, + 3,90,45,0,580,581,5,74,0,0,581,583,3,90,45,0,582,580,1,0,0,0,583, + 586,1,0,0,0,584,582,1,0,0,0,584,585,1,0,0,0,585,588,1,0,0,0,586, + 584,1,0,0,0,587,579,1,0,0,0,587,588,1,0,0,0,588,589,1,0,0,0,589, + 591,5,50,0,0,590,592,3,0,0,0,591,590,1,0,0,0,591,592,1,0,0,0,592, + 593,1,0,0,0,593,594,5,82,0,0,594,595,3,28,14,0,595,89,1,0,0,0,596, + 
597,5,88,0,0,597,598,3,0,0,0,598,91,1,0,0,0,599,600,5,88,0,0,600, + 601,5,76,0,0,601,602,7,3,0,0,602,93,1,0,0,0,64,100,111,116,122,124, + 128,143,152,158,179,181,188,193,198,205,214,218,225,230,240,243, + 248,256,261,270,275,291,295,304,310,315,321,331,336,339,346,352, + 358,363,376,384,390,394,418,431,433,452,454,465,471,485,499,514, + 516,526,528,537,543,554,563,571,584,587,591 ] class PyNestMLParser ( Parser ): @@ -3388,6 +3390,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser self.inputPortName = None # Token + self.vectorParameter = None # ExpressionContext def ON_RECEIVE_KEYWORD(self): return self.getToken(PyNestMLParser.ON_RECEIVE_KEYWORD, 0) @@ -3408,6 +3411,12 @@ def block(self): def NAME(self): return self.getToken(PyNestMLParser.NAME, 0) + def LEFT_SQUARE_BRACKET(self): + return self.getToken(PyNestMLParser.LEFT_SQUARE_BRACKET, 0) + + def RIGHT_SQUARE_BRACKET(self): + return self.getToken(PyNestMLParser.RIGHT_SQUARE_BRACKET, 0) + def COMMA(self, i:int=None): if i is None: return self.getTokens(PyNestMLParser.COMMA) @@ -3421,6 +3430,10 @@ def constParameter(self, i:int=None): return self.getTypedRuleContext(PyNestMLParser.ConstParameterContext,i) + def expression(self): + return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + + def getRuleIndex(self): return PyNestMLParser.RULE_onReceiveBlock @@ -3449,20 +3462,32 @@ def onReceiveBlock(self): self.state = 465 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: + if _la==56: self.state = 461 - self.match(PyNestMLParser.COMMA) + self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) self.state = 462 - self.constParameter() + localctx.vectorParameter = self.expression(0) + self.state = 463 + self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) + + + self.state = 471 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==74: self.state = 467 + self.match(PyNestMLParser.COMMA) + self.state = 468 + self.constParameter() + self.state = 473 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 468 + self.state = 474 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 469 + self.state = 475 self.match(PyNestMLParser.COLON) - self.state = 470 + self.state = 476 self.block() except RecognitionException as re: localctx.exception = re @@ -3533,29 +3558,29 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 472 + self.state = 478 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 473 + self.state = 479 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 474 + self.state = 480 localctx.condition = self.expression(0) - self.state = 479 + self.state = 485 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 475 + self.state = 481 self.match(PyNestMLParser.COMMA) - self.state = 476 + self.state = 482 self.constParameter() - self.state = 481 + self.state = 487 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 482 + self.state = 488 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 483 + self.state = 489 self.match(PyNestMLParser.COLON) - self.state = 484 + self.state = 490 self.block() except RecognitionException as re: localctx.exception = re @@ -3621,7 +3646,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 486 + self.state = 492 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and 
((1 << _la) & 30064771072) != 0)): @@ -3629,25 +3654,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 487 + self.state = 493 self.match(PyNestMLParser.COLON) - self.state = 488 + self.state = 494 self.match(PyNestMLParser.NEWLINE) - self.state = 489 + self.state = 495 self.match(PyNestMLParser.INDENT) - self.state = 491 + self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 490 + self.state = 496 self.declaration_newline() - self.state = 493 + self.state = 499 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==16 or _la==29 or _la==88): break - self.state = 495 + self.state = 501 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3693,11 +3718,11 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 497 + self.state = 503 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 498 + self.state = 504 self.match(PyNestMLParser.COLON) - self.state = 499 + self.state = 505 self.block() except RecognitionException as re: localctx.exception = re @@ -3770,43 +3795,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 501 + self.state = 507 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 502 + self.state = 508 self.match(PyNestMLParser.COLON) - self.state = 503 + self.state = 509 self.match(PyNestMLParser.NEWLINE) - self.state = 504 + self.state = 510 self.match(PyNestMLParser.INDENT) - self.state = 508 + self.state = 514 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 508 + self.state = 514 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 29]: - self.state = 505 + self.state = 511 self.inlineExpression() pass elif token in [88]: - self.state = 506 + self.state = 512 self.odeEquation() pass elif token in [30]: - self.state = 507 + self.state = 513 self.kernel() pass else: raise NoViableAltException(self) - self.state = 510 + self.state = 516 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==88): break - self.state = 512 + self.state = 518 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3872,39 +3897,39 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 514 + self.state = 520 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 515 + self.state = 521 self.match(PyNestMLParser.COLON) - self.state = 516 + self.state = 522 self.match(PyNestMLParser.NEWLINE) - self.state = 517 + self.state = 523 self.match(PyNestMLParser.INDENT) - self.state = 520 + self.state = 526 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 520 + self.state = 526 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,53,self._ctx) + la_ = self._interp.adaptivePredict(self._input,54,self._ctx) if la_ == 1: - self.state = 518 + self.state = 524 self.spikeInputPort() pass elif la_ == 2: - self.state = 519 + self.state = 525 self.continuousInputPort() pass - self.state = 522 + self.state = 528 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==88): break - self.state = 524 + self.state = 530 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3972,35 +3997,35 @@ def spikeInputPort(self): 
self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 526 + self.state = 532 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 531 + self.state = 537 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 527 + self.state = 533 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 528 + self.state = 534 localctx.sizeParameter = self.expression(0) - self.state = 529 + self.state = 535 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 533 + self.state = 539 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 537 + self.state = 543 self._errHandler.sync(self) _la = self._input.LA(1) while _la==43 or _la==44: - self.state = 534 + self.state = 540 self.inputQualifier() - self.state = 539 + self.state = 545 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 540 + self.state = 546 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 541 + self.state = 547 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4065,27 +4090,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 543 + self.state = 549 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 548 + self.state = 554 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 544 + self.state = 550 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 545 + self.state = 551 localctx.sizeParameter = self.expression(0) - self.state = 546 + self.state = 552 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 550 + self.state = 556 self.dataType() - self.state = 551 + self.state = 557 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 552 + self.state = 558 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 553 + self.state = 559 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4129,15 +4154,15 @@ def inputQualifier(self): self.enterRule(localctx, 84, self.RULE_inputQualifier) try: self.enterOuterAlt(localctx, 1) - self.state = 557 + self.state = 563 self._errHandler.sync(self) token = self._input.LA(1) if token in [43]: - self.state = 555 + self.state = 561 localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) pass elif token in [44]: - self.state = 556 + self.state = 562 localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) pass else: @@ -4203,31 +4228,31 @@ def outputBlock(self): self.enterRule(localctx, 86, self.RULE_outputBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 559 + self.state = 565 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 560 + self.state = 566 self.match(PyNestMLParser.COLON) - self.state = 561 + self.state = 567 self.match(PyNestMLParser.NEWLINE) - self.state = 562 + self.state = 568 self.match(PyNestMLParser.INDENT) - self.state = 565 + self.state = 571 self._errHandler.sync(self) token = self._input.LA(1) if token in [42]: - self.state = 563 + self.state = 569 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass elif token in [39]: - self.state = 564 + self.state = 570 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 567 + self.state = 573 self.match(PyNestMLParser.NEWLINE) - self.state = 568 + self.state = 574 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4301,45 +4326,45 @@ def function(self): self._la = 
0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 570 + self.state = 576 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 571 + self.state = 577 self.match(PyNestMLParser.NAME) - self.state = 572 + self.state = 578 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 581 + self.state = 587 self._errHandler.sync(self) _la = self._input.LA(1) if _la==88: - self.state = 573 + self.state = 579 self.parameter() - self.state = 578 + self.state = 584 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 574 + self.state = 580 self.match(PyNestMLParser.COMMA) - self.state = 575 + self.state = 581 self.parameter() - self.state = 580 + self.state = 586 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 583 + self.state = 589 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 585 + self.state = 591 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==88 or _la==89: - self.state = 584 + self.state = 590 localctx.returnType = self.dataType() - self.state = 587 + self.state = 593 self.match(PyNestMLParser.COLON) - self.state = 588 + self.state = 594 self.block() except RecognitionException as re: localctx.exception = re @@ -4382,9 +4407,9 @@ def parameter(self): self.enterRule(localctx, 90, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 590 + self.state = 596 self.match(PyNestMLParser.NAME) - self.state = 591 + self.state = 597 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4444,11 +4469,11 @@ def constParameter(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 593 + self.state = 599 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 594 + self.state = 600 self.match(PyNestMLParser.EQUALS) - self.state = 595 + self.state = 601 localctx.value = self._input.LT(1) _la = self._input.LA(1) if not(_la==25 or ((((_la - 86)) & ~0x3f) == 0 and ((1 << (_la - 86)) & 27) != 0)): diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 5d2af2d50..a23ed14d7 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -238,7 +238,7 @@ parser grammar PyNestMLParser; /** ASTOnReceiveBlock @attribute block implementation of the dynamics */ - onReceiveBlock: ON_RECEIVE_KEYWORD LEFT_PAREN inputPortName=NAME (COMMA constParameter)* RIGHT_PAREN COLON + onReceiveBlock: ON_RECEIVE_KEYWORD LEFT_PAREN inputPortName=NAME (LEFT_SQUARE_BRACKET vectorParameter=expression RIGHT_SQUARE_BRACKET)? 
(COMMA constParameter)* RIGHT_PAREN COLON block; /** ASTOnConditionBlock diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index da3986be9..a5ce2e8f5 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -116,8 +116,8 @@ def create_ast_namespace_decorator(cls, namespace=None, name=None, source_positi return ASTNamespaceDecorator(namespace, name, source_position=source_position) @classmethod - def create_ast_on_receive_block(cls, block=None, port_name=None, const_parameters=None, source_position=None): - return ASTOnReceiveBlock(block, port_name, const_parameters, source_position=source_position) + def create_ast_on_receive_block(cls, block=None, port_name=None, port_index=None, const_parameters=None, source_position=None): + return ASTOnReceiveBlock(block, port_name, port_index, const_parameters, source_position=source_position) @classmethod def create_ast_on_condition_block(cls, block=None, cond_expr=None, const_parameters=None, source_position=None): diff --git a/pynestml/meta_model/ast_on_receive_block.py b/pynestml/meta_model/ast_on_receive_block.py index d7ca37812..b406d9c12 100644 --- a/pynestml/meta_model/ast_on_receive_block.py +++ b/pynestml/meta_model/ast_on_receive_block.py @@ -38,7 +38,7 @@ class ASTOnReceiveBlock(ASTNode): """ - def __init__(self, block: ASTBlock, port_name: str, const_parameters: Optional[Mapping] = None, *args, **kwargs): + def __init__(self, block: ASTBlock, port_name: str, port_index: Optional[int], const_parameters: Optional[Mapping] = None, *args, **kwargs): r""" Standard constructor. :param block: a block of definitions. @@ -47,6 +47,7 @@ def __init__(self, block: ASTBlock, port_name: str, const_parameters: Optional[M super(ASTOnReceiveBlock, self).__init__(*args, **kwargs) self.block = block self.port_name = port_name + self.port_index = port_index self.const_parameters = const_parameters if self.const_parameters is None: self.const_parameters = {} @@ -59,6 +60,7 @@ def clone(self) -> ASTOnReceiveBlock: """ dup = ASTOnReceiveBlock(block=self.block.clone(), port_name=self.port_name, + port_index=self.port_index, const_parameters=self.const_parameters, # ASTNode common attributes: source_position=self.source_position, @@ -87,6 +89,20 @@ def get_port_name(self) -> str: """ return self.port_name + def has_port_index(self) -> bool: + r""" + Returns whether there is a port vector index + :return: the port index + """ + return self.port_index is not None + + def get_port_index(self) -> Optional[int]: + r""" + Returns the port vector index if there is one, otherwise None + :return: the port index + """ + return self.port_index + def get_children(self) -> List[ASTNode]: r""" Returns the children of this node, if any. 
diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py
index 0e766d530..e5c50dea2 100644
--- a/pynestml/visitors/ast_builder_visitor.py
+++ b/pynestml/visitors/ast_builder_visitor.py
@@ -699,7 +699,13 @@ def visitOnReceiveBlock(self, ctx):
         const_parameters = {}
         for el in ctx.constParameter():
             const_parameters[el.name.text] = el.value.text
-        ret = ASTNodeFactory.create_ast_on_receive_block(block=block, port_name=port_name, const_parameters=const_parameters, source_position=create_source_pos(ctx))
+
+        port_index = None
+        if ctx.vectorParameter:
+            vector_parameter = self.visit(ctx.vectorParameter)
+            port_index = int(vector_parameter.get_numeric_literal())
+
+        ret = ASTNodeFactory.create_ast_on_receive_block(block=block, port_name=port_name, port_index=port_index, const_parameters=const_parameters, source_position=create_source_pos(ctx))
         update_node_comments(ret, self.__comments.visit(ctx))
 
         return ret
diff --git a/tests/nest_tests/resources/onreceive_vector_input_ports_neuron.nestml b/tests/nest_tests/resources/onreceive_vector_input_ports_neuron.nestml
new file mode 100644
index 000000000..7d54939fb
--- /dev/null
+++ b/tests/nest_tests/resources/onreceive_vector_input_ports_neuron.nestml
@@ -0,0 +1,30 @@
+"""
+onreceive_vector_input_ports_neuron
+###################################
+
+Description
++++++++++++
+
+Used in NESTML unit testing.
+
+"""
+model onreceive_vector_input_ports_neuron:
+    state:
+        x real = 0
+        y real = 0
+        z real = 0
+
+    input:
+        spikes[2] <- spike
+
+    output:
+        spike
+
+    onReceive(spikes):
+        x += 1
+
+    onReceive(spikes[0]):
+        y += 1
+
+    onReceive(spikes[1]):
+        z += 1
diff --git a/tests/nest_tests/test_onreceive_vector_input_ports.py b/tests/nest_tests/test_onreceive_vector_input_ports.py
new file mode 100644
index 000000000..555e88638
--- /dev/null
+++ b/tests/nest_tests/test_onreceive_vector_input_ports.py
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+#
+# test_onreceive_vector_input_ports.py
+#
+# This file is part of NEST.
+#
+# Copyright (C) 2004 The NEST Initiative
+#
+# NEST is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# NEST is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with NEST.  If not, see <http://www.gnu.org/licenses/>.
+
+import numpy as np
+import os
+import pytest
+
+import nest
+
+from pynestml.codegeneration.nest_tools import NESTTools
+from pynestml.frontend.pynestml_frontend import generate_nest_target
+
+try:
+    import matplotlib
+    import matplotlib.pyplot as plt
+    TEST_PLOTS = True
+except BaseException:
+    TEST_PLOTS = False
+
+
+@pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"),
+                    reason="This test does not support NEST 2")
+class TestOnReceiveVectorInputPorts:
+
+    def test_multisynapse_with_vector_input_ports(self):
+        input_path = os.path.join(os.path.realpath(os.path.join(
+            os.path.dirname(__file__), "resources", "onreceive_vector_input_ports_neuron.nestml")))
+        target_path = "target"
+        logging_level = "DEBUG"
+        suffix = "_nestml"
+
+        generate_nest_target(input_path,
+                             target_path=target_path,
+                             logging_level=logging_level,
+                             suffix=suffix)
+
+        nest.ResetKernel()
+        nest.set_verbosity("M_ALL")
+        nest.resolution = 0.1
+        try:
+            nest.Install("nestmlmodule")
+        except Exception:
+            # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions
+            pass
+
+        # network construction
+        neuron = nest.Create("onreceive_vector_input_ports_neuron_nestml")
+
+        # List of receptor types for the spiking input ports
+        receptor_types = nest.GetStatus(neuron, "receptor_types")[0]
+
+        sg = nest.Create("spike_generator", params={"spike_times": [20., 80.]})
+        nest.Connect(sg, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_0"], "weight": 1000., "delay": 0.1})
+
+        sg2 = nest.Create("spike_generator", params={"spike_times": [40., 60.]})
+        nest.Connect(sg2, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_1"], "weight": 1000., "delay": 0.1})
+
+        mm = nest.Create("multimeter", params={"record_from": ["x", "y", "z"], "interval": nest.resolution})
+        nest.Connect(mm, neuron)
+
+        # simulate
+        nest.Simulate(125.)
+
+        if TEST_PLOTS:
+            fig, ax = plt.subplots(nrows=3)
+
+            ax[0].plot(mm.events["times"], mm.events["x"], label="x")
+            ax[0].set_ylabel("spike count")
+
+            ax[1].plot(mm.events["times"], mm.events["y"], label="y")
+            ax[1].set_ylabel("spike count")
+
+            ax[2].plot(mm.events["times"], mm.events["z"], label="z")
+            ax[2].set_ylabel("spike count")
+
+            for _ax in ax:
+                _ax.legend(loc="upper right")
+                _ax.set_xlim(0., 125.)
+                _ax.grid(True)
+
+            for _ax in ax[:-1]:
+                _ax.set_xticklabels([])
+
+            ax[-1].set_xlabel("time [ms]")
+
+            fig.savefig("/tmp/test_onreceive_vector_input_ports.png")
+
+        # testing
+        np.testing.assert_almost_equal(mm.events["x"][-1], 4)
+        np.testing.assert_almost_equal(mm.events["y"][-1], 2)
+        np.testing.assert_almost_equal(mm.events["z"][-1], 2)

From e2572d8b72f29f2c04f1707501b5fe2ac91b11f5 Mon Sep 17 00:00:00 2001
From: "C.A.P. Linssen"
Date: Mon, 23 Sep 2024 16:35:37 +0200
Subject: [PATCH 3/3] Revert "stp synapse name change"

This reverts commit a0866860151507606c8cfe06f6226160285e7d1f.
--- ...stp_synapse.nestml => stdp_synapse.nestml} | 2 +- tests/nest_tests/stdp_synapse_test.py | 318 +++++++++++++++++- 2 files changed, 304 insertions(+), 16 deletions(-) rename models/synapses/{stp_synapse.nestml => stdp_synapse.nestml} (99%) diff --git a/models/synapses/stp_synapse.nestml b/models/synapses/stdp_synapse.nestml similarity index 99% rename from models/synapses/stp_synapse.nestml rename to models/synapses/stdp_synapse.nestml index b858d2b4d..f04b4b971 100644 --- a/models/synapses/stp_synapse.nestml +++ b/models/synapses/stdp_synapse.nestml @@ -33,7 +33,7 @@ References Stable Hebbian learning from spike timing-dependent plasticity, Journal of Neuroscience, 20:23,8812--8821 """ -model stp_synapse: +model stdp_synapse: state: w real = 1 # Synaptic weight pre_trace real = 0. diff --git a/tests/nest_tests/stdp_synapse_test.py b/tests/nest_tests/stdp_synapse_test.py index 309e0e08c..e6ec31cb9 100644 --- a/tests/nest_tests/stdp_synapse_test.py +++ b/tests/nest_tests/stdp_synapse_test.py @@ -45,30 +45,318 @@ class TestNestSTDPSynapse: - neuron_model_name = "iaf_psc_exp_neuron_nestml__with_stp_synapse_nestml" - synapse_model_name = "stp_synapse_nestml__with_iaf_psc_exp_neuron_nestml" + neuron_model_name = "iaf_psc_exp_neuron_nestml__with_stdp_synapse_nestml" + ref_neuron_model_name = "iaf_psc_exp_neuron_nestml_non_jit" + + synapse_model_name = "stdp_synapse_nestml__with_iaf_psc_exp_neuron_nestml" + ref_synapse_model_name = "stdp_synapse" @pytest.fixture(autouse=True, scope="module") def generate_model_code(self): """Generate the model code""" - codegen_opts = {"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_neuron", - "synapse": "stp_synapse", + jit_codegen_opts = {"neuron_synapse_pairs": [{"neuron": "iaf_psc_exp_neuron", + "synapse": "stdp_synapse", "post_ports": ["post_spikes"]}], - "delay_variable": {"stp_synapse": "d"}, - "weight_variable": {"stp_synapse": "w"}} + "delay_variable": {"stdp_synapse": "d"}, + "weight_variable": {"stdp_synapse": "w"}} + if not NESTTools.detect_nest_version().startswith("v2"): + jit_codegen_opts["neuron_parent_class"] = "StructuralPlasticityNode" + jit_codegen_opts["neuron_parent_class_include"] = "structural_plasticity_node.h" + + # generate the "jit" model (co-generated neuron and synapse), that does not rely on ArchivingNode files = [os.path.join("models", "neurons", "iaf_psc_exp_neuron.nestml"), - os.path.join("models", "synapses", "stp_synapse.nestml")] + os.path.join("models", "synapses", "stdp_synapse.nestml")] input_path = [os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join( - os.pardir, os.pardir, s))) for s in files] + os.pardir, os.pardir, s))) for s in files] generate_nest_target(input_path=input_path, - logging_level="DEBUG", + target_path="/tmp/nestml-jit", + logging_level="INFO", + module_name="nestml_jit_module", suffix="_nestml", - codegen_opts=codegen_opts) - def test_foo(self): - nest.Install("nestmlmodule") - n = nest.Create("iaf_psc_exp_neuron_nestml__with_stp_synapse_nestml", 2) - nest.Connect(n[0], n[1], syn_spec={"synapse_model": "stp_synapse_nestml__with_iaf_psc_exp_neuron_nestml"}) - nest.Simulate(100) + codegen_opts=jit_codegen_opts) + + if NESTTools.detect_nest_version().startswith("v2"): + non_jit_codegen_opts = {"neuron_parent_class": "Archiving_Node", + "neuron_parent_class_include": "archiving_node.h"} + else: + non_jit_codegen_opts = {"neuron_parent_class": "ArchivingNode", + "neuron_parent_class_include": "archiving_node.h"} + + # generate the "non-jit" model, that relies on ArchivingNode + 
generate_nest_target(input_path=os.path.realpath(os.path.join(os.path.dirname(__file__), + os.path.join(os.pardir, os.pardir, "models", "neurons", "iaf_psc_exp_neuron.nestml"))), + target_path="/tmp/nestml-non-jit", + logging_level="INFO", + module_name="nestml_non_jit_module", + suffix="_nestml_non_jit", + codegen_opts=non_jit_codegen_opts) + + @pytest.mark.parametrize("delay", [1., 1.5]) + @pytest.mark.parametrize("resolution", [.1, .5, 1.]) + @pytest.mark.parametrize("pre_spike_times,post_spike_times", [ + ([1., 11., 21.], + [6., 16., 26.]), + (np.sort(np.unique(1 + np.round(100 * np.sort(np.abs(np.random.randn(100)))))), + np.sort(np.unique(1 + np.round(100 * np.sort(np.abs(np.random.randn(100))))))), + (np.array([2., 4., 7., 8., 12., 13., 19., 23., 24., 28., 29., 30., 33., 34., + 35., 36., 38., 40., 42., 46., 51., 53., 54., 55., 56., 59., 63., 64., + 65., 66., 68., 72., 73., 76., 79., 80., 83., 84., 86., 87., 90., 95., + 99., 100., 103., 104., 105., 111., 112., 126., 131., 133., 134., 139., 147., 150., + 152., 155., 172., 175., 176., 181., 196., 197., 199., 202., 213., 215., 217., 265.]), + np.array([4., 5., 6., 7., 10., 11., 12., 16., 17., 18., 19., 20., 22., 23., + 25., 27., 29., 30., 31., 32., 34., 36., 37., 38., 39., 42., 44., 46., + 48., 49., 50., 54., 56., 57., 59., 60., 61., 62., 67., 74., 76., 79., + 80., 81., 83., 88., 93., 94., 97., 99., 100., 105., 111., 113., 114., 115., + 116., 119., 123., 130., 132., 134., 135., 145., 152., 155., 158., 166., 172., 174., + 188., 194., 202., 245., 249., 289., 454.])), + (np.array([1, 5, 6, 7, 9, 11, 12, 13, 14.5, 16.1]), + np.array([2, 3, 4, 8, 9, 10, 12, 13.2, 15.1, 16.4])) + ]) + def test_nest_stdp_synapse(self, pre_spike_times: Sequence[float], post_spike_times: Sequence[float], resolution: float, delay: float, fname_snip: str = ""): + self.run_synapse_test(neuron_model_name=self.neuron_model_name, + ref_neuron_model_name=self.ref_neuron_model_name, + synapse_model_name=self.synapse_model_name, + ref_synapse_model_name=self.ref_synapse_model_name, + resolution=resolution, # [ms] + delay=delay, # [ms] + pre_spike_times=pre_spike_times, + post_spike_times=post_spike_times, + fname_snip=fname_snip) + + def run_synapse_test(self, neuron_model_name, + ref_neuron_model_name, + synapse_model_name, + ref_synapse_model_name, + resolution=1., # [ms] + delay=1., # [ms] + sim_time=None, # if None, computed from pre and post spike times + pre_spike_times=None, + post_spike_times=None, + fname_snip=""): + + if pre_spike_times is None: + pre_spike_times = [] + + if post_spike_times is None: + post_spike_times = [] + + if sim_time is None: + sim_time = max(np.amax(pre_spike_times), np.amax(post_spike_times)) + 5 * delay + + nest.set_verbosity("M_ALL") + nest.ResetKernel() + + # load the generated modules into NEST + try: + nest.Install("nestml_jit_module") + except Exception: + # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions + pass + + try: + nest.Install("nestml_non_jit_module") + except Exception: + # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions + pass + + print("Pre spike times: " + str(pre_spike_times)) + print("Post spike times: " + str(post_spike_times)) + + # nest.set_verbosity("M_WARNING") + nest.set_verbosity("M_ERROR") + + nest.SetKernelStatus({"resolution": resolution}) + + wr = nest.Create("weight_recorder") + wr_ref = nest.Create("weight_recorder") + 
nest.CopyModel(synapse_model_name, "stdp_nestml_rec", + {"weight_recorder": wr[0], "w": 1., "d": 1., "receptor_type": 0}) + nest.CopyModel(ref_synapse_model_name, "stdp_ref_rec", + {"weight_recorder": wr_ref[0], "weight": 1., "delay": 1., "receptor_type": 0}) + + # create spike_generators with these times + pre_sg = nest.Create("spike_generator", + params={"spike_times": pre_spike_times, + "allow_offgrid_times": True}) + post_sg = nest.Create("spike_generator", + params={"spike_times": post_spike_times, + "allow_offgrid_times": True}) + + # create parrot neurons and connect spike_generators + if sim_mdl: + pre_neuron = nest.Create("parrot_neuron") + post_neuron = nest.Create(neuron_model_name) + nest.SetStatus(post_neuron, "tau_syn_exc", .2) # [ms] -- very brief + + if sim_ref: + pre_neuron_ref = nest.Create("parrot_neuron") + post_neuron_ref = nest.Create(ref_neuron_model_name) + nest.SetStatus(post_neuron_ref, "tau_syn_exc", .2) # [ms] -- very brief + + if sim_mdl: + if NESTTools.detect_nest_version().startswith("v2"): + spikedet_pre = nest.Create("spike_detector") + spikedet_post = nest.Create("spike_detector") + else: + spikedet_pre = nest.Create("spike_recorder") + spikedet_post = nest.Create("spike_recorder") + mm = nest.Create("multimeter", params={"record_from": [ + "V_m", "post_trace__for_stdp_synapse_nestml"]}) + if sim_ref: + if NESTTools.detect_nest_version().startswith("v2"): + spikedet_pre_ref = nest.Create("spike_detector") + spikedet_post_ref = nest.Create("spike_detector") + else: + spikedet_pre_ref = nest.Create("spike_recorder") + spikedet_post_ref = nest.Create("spike_recorder") + mm_ref = nest.Create("multimeter", params={"record_from": ["V_m"]}) + + if sim_mdl: + nest.Connect(pre_sg, pre_neuron, "one_to_one", syn_spec={"delay": 1.}) + nest.Connect(post_sg, post_neuron, "one_to_one", syn_spec={"delay": 1., "weight": 99999.}) + if NESTTools.detect_nest_version().startswith("v2"): + nest.Connect(pre_neuron, post_neuron, "all_to_all", syn_spec={"model": "stdp_nestml_rec"}) + else: + nest.Connect(pre_neuron, post_neuron, "all_to_all", syn_spec={"synapse_model": "stdp_nestml_rec"}) + nest.Connect(mm, post_neuron) + nest.Connect(pre_neuron, spikedet_pre) + nest.Connect(post_neuron, spikedet_post) + if sim_ref: + nest.Connect(pre_sg, pre_neuron_ref, "one_to_one", syn_spec={"delay": 1.}) + nest.Connect(post_sg, post_neuron_ref, "one_to_one", syn_spec={"delay": 1., "weight": 99999.}) + if NESTTools.detect_nest_version().startswith("v2"): + nest.Connect(pre_neuron_ref, post_neuron_ref, "all_to_all", + syn_spec={"model": ref_synapse_model_name}) + else: + nest.Connect(pre_neuron_ref, post_neuron_ref, "all_to_all", + syn_spec={"synapse_model": ref_synapse_model_name}) + nest.Connect(mm_ref, post_neuron_ref) + nest.Connect(pre_neuron_ref, spikedet_pre_ref) + nest.Connect(post_neuron_ref, spikedet_post_ref) + + # get STDP synapse and weight before protocol + if sim_mdl: + syn = nest.GetConnections(source=pre_neuron, synapse_model="stdp_nestml_rec") + if sim_ref: + syn_ref = nest.GetConnections(source=pre_neuron_ref, synapse_model=ref_synapse_model_name) + + n_steps = int(np.ceil(sim_time / resolution)) + 1 + t = 0. 
+ t_hist = [] + if sim_mdl: + w_hist = [] + if sim_ref: + w_hist_ref = [] + while t <= sim_time: + nest.Simulate(resolution) + t += resolution + t_hist.append(t) + if sim_ref: + w_hist_ref.append(nest.GetStatus(syn_ref)[0]["weight"]) + if sim_mdl: + w_hist.append(nest.GetStatus(syn)[0]["w"]) + + # plot + if TEST_PLOTS: + fig, ax = plt.subplots(nrows=2) + ax1, ax2 = ax + + if sim_mdl: + timevec = nest.GetStatus(mm, "events")[0]["times"] + V_m = nest.GetStatus(mm, "events")[0]["V_m"] + ax2.plot(timevec, nest.GetStatus(mm, "events")[0]["post_trace__for_stdp_synapse_nestml"], label="post_tr nestml") + ax1.plot(timevec, V_m, label="nestml", alpha=.7, linestyle=":") + if sim_ref: + pre_ref_spike_times_ = nest.GetStatus(spikedet_pre_ref, "events")[0]["times"] + timevec = nest.GetStatus(mm_ref, "events")[0]["times"] + V_m = nest.GetStatus(mm_ref, "events")[0]["V_m"] + ax1.plot(timevec, V_m, label="nest ref", alpha=.7) + ax1.set_ylabel("V_m") + + for _ax in ax: + _ax.grid(which="major", axis="both") + _ax.grid(which="minor", axis="x", linestyle=":", alpha=.4) + # _ax.minorticks_on() + _ax.set_xlim(0., sim_time) + _ax.legend() + fig.savefig("/tmp/stdp_synapse_test" + fname_snip + "_V_m.png", dpi=300) + + # plot + if TEST_PLOTS: + fig, ax = plt.subplots(nrows=3) + ax1, ax2, ax3 = ax + + if sim_mdl: + pre_spike_times_ = nest.GetStatus(spikedet_pre, "events")[0]["times"] + print("Actual pre spike times: " + str(pre_spike_times_)) + if sim_ref: + pre_ref_spike_times_ = nest.GetStatus(spikedet_pre_ref, "events")[0]["times"] + print("Actual pre ref spike times: " + str(pre_ref_spike_times_)) + + if sim_mdl: + n_spikes = len(pre_spike_times_) + for i in range(n_spikes): + if i == 0: + _lbl = "nestml" + else: + _lbl = None + ax1.plot(2 * [pre_spike_times_[i] + delay], [0, 1], linewidth=2, color="blue", alpha=.4, label=_lbl) + + if sim_mdl: + post_spike_times_ = nest.GetStatus(spikedet_post, "events")[0]["times"] + print("Actual post spike times: " + str(post_spike_times_)) + if sim_ref: + post_ref_spike_times_ = nest.GetStatus(spikedet_post_ref, "events")[0]["times"] + print("Actual post ref spike times: " + str(post_ref_spike_times_)) + + if sim_ref: + n_spikes = len(pre_ref_spike_times_) + for i in range(n_spikes): + if i == 0: + _lbl = "nest ref" + else: + _lbl = None + ax1.plot(2 * [pre_ref_spike_times_[i] + delay], [0, 1], + linewidth=2, color="cyan", label=_lbl, alpha=.4) + ax1.set_ylabel("Pre spikes") + + if sim_mdl: + n_spikes = len(post_spike_times_) + for i in range(n_spikes): + if i == 0: + _lbl = "nestml" + else: + _lbl = None + ax2.plot(2 * [post_spike_times_[i]], [0, 1], linewidth=2, color="black", alpha=.4, label=_lbl) + if sim_ref: + n_spikes = len(post_ref_spike_times_) + for i in range(n_spikes): + if i == 0: + _lbl = "nest ref" + else: + _lbl = None + ax2.plot(2 * [post_ref_spike_times_[i]], [0, 1], linewidth=2, color="red", alpha=.4, label=_lbl) + ax2.plot(timevec, nest.GetStatus(mm, "events")[0]["post_trace__for_stdp_synapse_nestml"], label="nestml post tr") + ax2.set_ylabel("Post spikes") + + if sim_mdl: + ax3.plot(t_hist, w_hist, marker="o", label="nestml") + if sim_ref: + ax3.plot(t_hist, w_hist_ref, linestyle="--", marker="x", label="ref") + + ax3.set_xlabel("Time [ms]") + ax3.set_ylabel("w") + for _ax in ax: + _ax.grid(which="major", axis="both") + _ax.xaxis.set_major_locator(matplotlib.ticker.FixedLocator(np.arange(0, np.ceil(sim_time)))) + _ax.set_xlim(0., sim_time) + _ax.legend() + fig.savefig("/tmp/stdp_synapse_test" + fname_snip + ".png", dpi=300) + # verify + MAX_ABS_ERROR = 
1E-6 + assert np.any(np.abs(np.array(w_hist) - 1) > MAX_ABS_ERROR), "No change in the weight!" + assert np.all(np.abs(np.array(w_hist) - np.array(w_hist_ref)) < MAX_ABS_ERROR), \ + "Difference between NESTML model and reference model!"