From ac05aa64cd824eb4c0a721e467b30bed46fd20dc Mon Sep 17 00:00:00 2001
From: Andrew Davison
Date: Tue, 1 Apr 2025 18:02:02 +0200
Subject: [PATCH 1/2] Start testing pyNN.arbor

---
 .github/workflows/full-test.yml | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/full-test.yml b/.github/workflows/full-test.yml
index 8482ff5f..f62789e0 100644
--- a/.github/workflows/full-test.yml
+++ b/.github/workflows/full-test.yml
@@ -50,9 +50,13 @@ jobs:
           cmake -DCMAKE_INSTALL_PREFIX=$HOME/.local -Dwith-mpi=ON ./nest-simulator-3.8
           make
           make install
+      - name: Install Arbor
+        if: startsWith(matrix.os, 'ubuntu')
+        run: |
+          python -m pip install arbor==0.9.0 libNeuroML
       - name: Install PyNN itself
         run: |
-          pip install -e ".[test]"
+          python -m pip install -e ".[test]"
       - name: Test installation has worked (Ubuntu)
         # this is needed because the PyNN tests are just skipped if the simulator
         # fails to install, so we need to catch import failures separately
@@ -61,6 +65,7 @@ jobs:
           python -c "import pyNN.nest"
           python -c "import pyNN.neuron"
           python -c "import pyNN.brian2"
+          python -c "import pyNN.arbor"
       - name: Test installation has worked (Windows)
         if: startsWith(matrix.os, 'windows')
         run: |

From c21ebb0e5d6461a1524e846414a6d81d5a57449d Mon Sep 17 00:00:00 2001
From: Andrew Davison
Date: Tue, 1 Apr 2025 22:42:48 +0200
Subject: [PATCH 2/2] Added first test for multicompartmental models

---
 pyNN/arbor/cells.py                      |   4 +-
 pyNN/arbor/populations.py                |   2 +
 pyNN/arbor/simulator.py                  |  11 ++-
 pyNN/arbor/standardmodels.py             |   9 +-
 pyNN/common/populations.py               |   9 +-
 pyNN/neuron/standardmodels/electrodes.py |   2 +-
 test/system/scenarios/fixtures.py        |   6 ++
 test/system/scenarios/test_scenario5.py  | 100 +++++++++++++++++++++++
 8 files changed, 134 insertions(+), 9 deletions(-)
 create mode 100644 test/system/scenarios/test_scenario5.py

diff --git a/pyNN/arbor/cells.py b/pyNN/arbor/cells.py
index 82f70ae3..b6ecdd01 100644
--- a/pyNN/arbor/cells.py
+++ b/pyNN/arbor/cells.py
@@ -125,7 +125,9 @@ def _build_decor(self, i):
             location_generator = current_source["location_generator"]
             mechanism = getattr(arbor, current_source["model_name"])
             for locset, label in location_generator.generate_locations(morph, label=f"{current_source['model_name']}_label"):
-                decor.place(locset, mechanism(**current_source["parameters"].evaluate()), label)
+                params = current_source["parameters"].evaluate(simplify=True)
+                mech = mechanism(**params)
+                decor.place(locset, mech, label)
 
         # add spike source
         decor.place('"root"', arbor.threshold_detector(-10), "detector")
diff --git a/pyNN/arbor/populations.py b/pyNN/arbor/populations.py
index 16bea021..f6aee0bf 100644
--- a/pyNN/arbor/populations.py
+++ b/pyNN/arbor/populations.py
@@ -117,6 +117,8 @@ def _create_cells(self):
             [simulator.Cell(id) for id in id_range], dtype=simulator.Cell
         )
 
+        for obj in self.all_cells:
+            obj.parent = self
 
         # for i, cell in enumerate(self.all_cells):
         #     #for key, value in parameter_space.items():
diff --git a/pyNN/arbor/simulator.py b/pyNN/arbor/simulator.py
index 3750f981..0b2a1a44 100644
--- a/pyNN/arbor/simulator.py
+++ b/pyNN/arbor/simulator.py
@@ -35,12 +35,17 @@ def build_mechanisms():
     return mech_path
 
 
-class Cell(int):
+class Cell(int, common.IDMixin):
+    local = True
 
     def __init__(self, n):
         """Create an ID object with numerical value `n`."""
-        self.gid = n
-        self.local = True
+        int.__init__(n)
+        common.IDMixin.__init__(self)
+
+    @property
+    def gid(self):
+        return int(self)
 
 
 class NetworkRecipe(arbor.recipe):
diff --git a/pyNN/arbor/standardmodels.py b/pyNN/arbor/standardmodels.py
index a8eb5c37..4bc65d6a 100644
--- a/pyNN/arbor/standardmodels.py
+++ b/pyNN/arbor/standardmodels.py
@@ -8,6 +8,7 @@
 import logging
 from copy import deepcopy
 
+import numpy as np
 import arbor
 
 from ..standardmodels import cells, ion_channels, synapses, electrodes, receptors, build_translations
@@ -60,9 +61,15 @@ def inject_into(self, cells, location=None):  # rename to `locations` ?
         if hasattr(cells, "parent"):
             cell_descr = cells.parent._arbor_cell_description.base_value
             index = cells.parent.id_to_index(cells.all_cells.astype(int))
-        else:
+        elif hasattr(cells, "_arbor_cell_description"):
             cell_descr = cells._arbor_cell_description.base_value
             index = cells.id_to_index(cells.all_cells.astype(int))
+        else:
+            assert isinstance(cells, (list, tuple))
+            # we're assuming all cells have the same parent here
+            cell_descr = cells[0].parent._arbor_cell_description.base_value
+            index = np.array(cells, dtype=int)
+            self.parameter_space.shape = (1,)
 
         if location is None:
             raise NotImplementedError
diff --git a/pyNN/common/populations.py b/pyNN/common/populations.py
index 0942c564..d5851619 100644
--- a/pyNN/common/populations.py
+++ b/pyNN/common/populations.py
@@ -46,7 +46,7 @@ class IDMixin(object):
     where p is a Population object.
     """
     # Simulator ID classes should inherit both from the base type of the ID
-    # (e.g., int or long) and from IDMixin.
+    # (e.g., int) and from IDMixin.
 
     def __getattr__(self, name):
         if name == "parent":
@@ -128,9 +128,12 @@ def _get_position(self):
     def local(self):
         return self.parent.is_local(self)
 
-    def inject(self, current_source):
+    def inject(self, current_source, location=None):
         """Inject current from a current source object into the cell."""
-        current_source.inject_into([self])
+        if location is None:
+            current_source.inject_into([self])
+        else:
+            current_source.inject_into([self], location=location)
 
     def get_initial_value(self, variable):
         """Get the initial value of a state variable of the cell."""
diff --git a/pyNN/neuron/standardmodels/electrodes.py b/pyNN/neuron/standardmodels/electrodes.py
index ab83201a..0bfd61ed 100644
--- a/pyNN/neuron/standardmodels/electrodes.py
+++ b/pyNN/neuron/standardmodels/electrodes.py
@@ -147,7 +147,7 @@ def inject_into(self, cells, location=None):
                 pass
             else:
                 raise TypeError("location must be a string or a LocationGenerator")
-            morphology = cells.celltype.parameter_space["morphology"].base_value  # todo: evaluate lazyarray
+            morphology = id.celltype.parameter_space["morphology"].base_value  # todo: evaluate lazyarray
             locations = location.generate_locations(morphology, label_prefix="dc_current_source", cell=id._cell)
             sections = []
             for loc in locations:
diff --git a/test/system/scenarios/fixtures.py b/test/system/scenarios/fixtures.py
index bb42d280..514bf788 100644
--- a/test/system/scenarios/fixtures.py
+++ b/test/system/scenarios/fixtures.py
@@ -21,6 +21,12 @@
 except ImportError:
     pass
 
+try:
+    import pyNN.arbor
+    available_modules["arbor"] = pyNN.arbor
+except ImportError:
+    pass
+
 
 class SimulatorNotAvailable:
 
diff --git a/test/system/scenarios/test_scenario5.py b/test/system/scenarios/test_scenario5.py
new file mode 100644
index 00000000..0e41ceb3
--- /dev/null
+++ b/test/system/scenarios/test_scenario5.py
@@ -0,0 +1,100 @@
+import sys
+import numpy as np
+
+try:
+    from neuroml import Morphology, Segment, Point3DWithDiam as P
+    have_neuroml = True
+except ImportError:
+    have_neuroml = False
+
+from pyNN.utility import init_logging
+from pyNN.morphology import NeuroMLMorphology
+from pyNN.parameters import IonicSpecies
+
+import pytest
+
+from .fixtures import run_with_simulators
+
+
+@run_with_simulators("arbor", "neuron")
+def test_scenario5(sim):
+    """
+    Array of multi-compartment neurons, each injected with a different current.
+    """
+    if not have_neuroml:
+        pytest.skip("libNeuroML not available")
+
+    init_logging(logfile=None, debug=True)
+
+    sim.setup(timestep=0.01)
+
+    soma = Segment(proximal=P(x=18.8, y=0, z=0, diameter=18.8),
+                   distal=P(x=0, y=0, z=0, diameter=18.8),
+                   name="soma", id=0)
+    dend = Segment(proximal=P(x=0, y=0, z=0, diameter=2),
+                   distal=P(x=-500, y=0, z=0, diameter=2),
+                   name="dendrite",
+                   parent=soma, id=1)
+
+    cell_class = sim.MultiCompartmentNeuron
+    cell_class.label = "ExampleMultiCompartmentNeuron"
+    cell_class.ion_channels = {'pas': sim.PassiveLeak, 'na': sim.NaChannel, 'kdr': sim.KdrChannel}
+
+    cell_type = cell_class(
+        morphology=NeuroMLMorphology(Morphology(segments=(soma, dend))),
+        cm=1.0,    # mF / cm**2
+        Ra=500.0,  # ohm.cm
+        ionic_species={
+            "na": IonicSpecies("na", reversal_potential=50.0),
+            "k": IonicSpecies("k", reversal_potential=-77.0)
+        },
+        pas={"conductance_density": sim.morphology.uniform('all', 0.0003),
+             "e_rev": -54.3},
+        na={"conductance_density": sim.morphology.uniform('soma', 0.120)},
+        kdr={"conductance_density": sim.morphology.uniform('soma', 0.036)}
+    )
+
+    neurons = sim.Population(5, cell_type, initial_values={'v': -60.0})
+
+    I = (0.04, 0.11, 0.13, 0.15, 0.18)
+    currents = [sim.DCSource(start=50, stop=150, amplitude=amp)
+                for amp in I]
+    for j, (neuron, current) in enumerate(zip(neurons, currents)):
+        if j % 2 == 0:                                           # these should
+            neuron.inject(current, location="soma")              # be entirely
+        else:                                                    # equivalent
+            current.inject_into([neuron], location="soma")
+
+    neurons.record('spikes')
+
+    sim.run(200.0)
+
+    spiketrains = neurons.get_data().segments[0].spiketrains
+    assert len(spiketrains) == 5
+    assert len(spiketrains[0]) == 0  # first cell does not fire
+    # expected values taken from the average of simulations with NEURON and Arbor
+    expected_spike_times = [
+        np.array([]),
+        np.array([52.41]),
+        np.array([52.15, 68.45, 84.73, 101.02, 117.31, 133.61, 149.9]),
+        np.array([51.96, 67.14, 82.13, 97.11, 112.08, 127.06, 142.04]),
+        np.array([51.75, 65.86, 79.7, 93.51, 107.33, 121.14, 134.96, 148.77])
+    ]
+    spike_times = [np.array(st) for st in spiketrains[1:]]
+    max_error = 0
+    for a, b in zip(spike_times, expected_spike_times[1:]):
+        if a.size == b.size:
+            max_error += abs((a - b) / b).max()
+        else:
+            max_error += 1
+    print("max error =", max_error)
+    assert max_error < 0.005, max_error
+    sim.end()
+    if "pytest" not in sys.modules:
+        return a, b, spike_times
+
+
+if __name__ == '__main__':
+    from pyNN.utility import get_simulator
+    sim, args = get_simulator()
+    test_scenario5(sim)
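
Illustration only, not part of the patches above: a minimal sketch of how one might run just the new multicompartment scenario from Python once both patches are applied, assuming pytest is installed and that the run_with_simulators fixture puts the simulator name ("arbor", "neuron") into the generated test ids.

    import pytest

    # Select only the Arbor-parametrised variant of the new test; "-s" keeps the
    # "max error" print visible. Drop "-k arbor" to run every available backend.
    pytest.main(["-s", "-k", "arbor", "test/system/scenarios/test_scenario5.py"])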