Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
figures/
files/*pkl
files/*npz
files/results/
scripts/output/
scripts/var/
scripts/optimization/Mishra_2016/
scripts/optimization/checkpoints/
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
## Data-driven network model of CA3 - featuring sequence replay and ripple oscillation

Code repository of the preprint: https://www.biorxiv.org/content/10.1101/2021.02.18.431868v1
Code repository of the [eLife article _"Hippocampal sharp wave-ripples and the associated sequence replay emerge from structured synaptic interactions in a network model of area CA3"_](https://elifesciences.org/articles/71850)

To run:

Expand Down
11,000 changes: 11,000 additions & 0 deletions files/BC_traces.txt

Large diffs are not rendered by default.

5,500 changes: 5,500 additions & 0 deletions files/PC_traces.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,6 @@ matplotlib
seaborn
pandas
brian2
bluepyopt
PyWavelets
tqdm
221 changes: 221 additions & 0 deletions scripts/gamma_network.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,221 @@
# -*- coding: utf8 -*-
"""
Mostly a copy-paste of `spw_network.py` but this one is optimized for gamma oscillation
(Creates AdExpIF PC and BC populations in Brian2, loads in recurrent connection matrix for PC population
runs simulation and checks the dynamics)
authors: András Ecker, Szabolcs Káli last update: 03.2021
"""

import os
import numpy as np
import random as pyrandom
from brian2 import *
prefs.codegen.target = "numpy"
import matplotlib.pyplot as plt
from helper import load_wmx, save_vars
from spw_network import analyse_results


# Repository root: two path components above this script (scripts/gamma_network.py).
# NOTE: the original used os.path.abspath("__file__") -- the *string* "__file__" --
# which resolves relative to the current working directory; using the real __file__
# makes base_path correct no matter where the script is launched from.
base_path = os.path.sep.join(os.path.abspath(__file__).split(os.path.sep)[:-2])

# ----- base parameters are COPY-PASTED from `spw_network.py` -----
# population size
nPCs = 8000
nBCs = 150
# sparseness (connection probabilities used for the non-learned pathways below)
connection_prob_PC = 0.1
connection_prob_BC = 0.25

# synaptic time constants:
# rise time constants
rise_PC_E = 1.3 * ms # Guzman 2016 (only from Fig.1 H - 20-80%)
rise_PC_MF = 0.65 * ms # Vyleta ... Jonas 2016 (20-80%)
rise_PC_I = 0.3 * ms # Bartos 2002 (20-80%)
rise_BC_E = 1. * ms # Lee 2014 (data from CA1)
rise_BC_I = 0.25 * ms # Bartos 2002 (20-80%)
# decay time constants
decay_PC_E = 9.5 * ms # Guzman 2016 ("needed for temporal summation of EPSPs")
decay_PC_MF = 5.4 * ms # Vyleta ... Jonas 2016
decay_PC_I = 3.3 * ms # Bartos 2002
decay_BC_E = 4.1 * ms # Lee 2014 (data from CA1)
decay_BC_I = 1.2 * ms # Bartos 2002
# Normalization factors (normalize the peak of the PSC curve to 1)
# For a difference-of-exponentials kernel, `tp` is the analytic time-to-peak;
# `norm_*` rescales the kernel so its maximum equals the synaptic weight.
tp = (decay_PC_E * rise_PC_E)/(decay_PC_E - rise_PC_E) * np.log(decay_PC_E/rise_PC_E) # time to peak
norm_PC_E = 1.0 / (np.exp(-tp/decay_PC_E) - np.exp(-tp/rise_PC_E))
tp = (decay_PC_MF * rise_PC_MF)/(decay_PC_MF - rise_PC_MF) * np.log(decay_PC_MF/rise_PC_MF)
norm_PC_MF = 1.0 / (np.exp(-tp/decay_PC_MF) - np.exp(-tp/rise_PC_MF))
tp = (decay_PC_I * rise_PC_I)/(decay_PC_I - rise_PC_I) * np.log(decay_PC_I/rise_PC_I)
norm_PC_I = 1.0 / (np.exp(-tp/decay_PC_I) - np.exp(-tp/rise_PC_I))
tp = (decay_BC_E * rise_BC_E)/(decay_BC_E - rise_BC_E) * np.log(decay_BC_E/rise_BC_E)
norm_BC_E = 1.0 / (np.exp(-tp/decay_BC_E) - np.exp(-tp/rise_BC_E))
tp = (decay_BC_I * rise_BC_I)/(decay_BC_I - rise_BC_I) * np.log(decay_BC_I/rise_BC_I)
norm_BC_I = 1.0 / (np.exp(-tp/decay_BC_I) - np.exp(-tp/rise_BC_I))
# synaptic delays:
delay_PC_E = 2.2 * ms # Guzman 2016
delay_PC_I = 1.1 * ms # Bartos 2002
delay_BC_E = 0.9 * ms # Geiger 1997 (data from DG)
delay_BC_I = 0.6 * ms # Bartos 2002
# synaptic reversal potentials
Erev_E = 0.0 * mV
Erev_I = -70.0 * mV

# unit "carrier": synaptic conductances are integrated as dimensionless variables
# in the model equations and multiplied by z (1 nS) in the current terms
z = 1 * nS
# AdExpIF parameters for PCs (re-optimized by Szabolcs)
g_leak_PC = 4.31475791937223 * nS
tau_mem_PC = 41.7488927175169 * ms
Cm_PC = tau_mem_PC * g_leak_PC
Vrest_PC = -75.1884554193901 * mV
Vreset_PC = -29.738747396665072 * mV
theta_PC = -24.4255910105977 * mV
tref_PC = 5.96326930945599 * ms
delta_T_PC = 4.2340696257631 * mV
spike_th_PC = theta_PC + 5 * delta_T_PC # numerical detection threshold, well above theta on the exponential upstroke
a_PC = -0.274347065652738 * nS
b_PC = 206.841448096415 * pA
tau_w_PC = 84.9358017225512 * ms
# NOTE: the triple-quoted block below is deliberately "commented-out" code
# (an alternative ExpIF parameter set that can be swapped in by hand)
""" comment this back to run with ExpIF PC model...
# ExpIF parameters for PCs (optimized by Szabolcs)
g_leak_PC = 4.88880734814042 * nS
tau_mem_PC = 70.403501012992 * ms
Cm_PC = tau_mem_PC * g_leak_PC
Vrest_PC = -76.59966923496779 * mV
Vreset_PC = -58.8210432444992 * mV
theta_PC = -28.7739788756 * mV
tref_PC = 1.07004414539699 * ms
delta_T_PC = 10.7807538634886 * mV
spike_th_PC = theta_PC + 5 * delta_T_PC
a_PC = 0. * nS
b_PC = 0. * pA
tau_w_PC = 1 * ms
"""
# parameters for BCs (re-optimized by Szabolcs)
g_leak_BC = 7.51454086502288 * nS
tau_mem_BC = 15.773412296065 * ms
Cm_BC = tau_mem_BC * g_leak_BC
Vrest_BC = -74.74167987795019 * mV
Vreset_BC = -64.99190523539687 * mV
theta_BC = -57.7092044103536 * mV
tref_BC = 1.15622717832178 * ms
delta_T_BC = 4.58413312063091 * mV
spike_th_BC = theta_BC + 5 * delta_T_BC
a_BC = 3.05640210724374 * nS
b_BC = 0.916098931234532 * pA
tau_w_BC = 178.581099914024 * ms

# AdExpIF membrane equation for PCs with adaptation current `w`, a constant
# cholinergic depolarizing current `depol_ACh`, and double-exponential
# (x_* -> g_*) synapses; conductances are dimensionless and scaled by `z` above
eqs_PC = """
dvm/dt = (-g_leak_PC*(vm-Vrest_PC) + g_leak_PC*delta_T_PC*exp((vm- theta_PC)/delta_T_PC) - w + depol_ACh - ((g_ampa+g_ampaMF)*z*(vm-Erev_E) + g_gaba*z*(vm-Erev_I)))/Cm_PC : volt (unless refractory)
dw/dt = (a_PC*(vm-Vrest_PC) - w) / tau_w_PC : amp
dg_ampa/dt = (x_ampa - g_ampa) / rise_PC_E : 1
dx_ampa/dt = -x_ampa / decay_PC_E : 1
dg_ampaMF/dt = (x_ampaMF - g_ampaMF) / rise_PC_MF : 1
dx_ampaMF/dt = -x_ampaMF / decay_PC_MF : 1
dg_gaba/dt = (x_gaba - g_gaba) / rise_PC_I : 1
dx_gaba/dt = -x_gaba/decay_PC_I : 1
depol_ACh: amp
"""

# same structure for BCs (no mossy fiber input and no ACh term)
eqs_BC = """
dvm/dt = (-g_leak_BC*(vm-Vrest_BC) + g_leak_BC*delta_T_BC*exp((vm- theta_BC)/delta_T_BC) - w - (g_ampa*z*(vm-Erev_E) + g_gaba*z*(vm-Erev_I)))/Cm_BC : volt (unless refractory)
dw/dt = (a_BC*(vm-Vrest_BC) - w) / tau_w_BC : amp
dg_ampa/dt = (x_ampa - g_ampa) / rise_BC_E : 1
dx_ampa/dt = -x_ampa/decay_BC_E : 1
dg_gaba/dt = (x_gaba - g_gaba) / rise_BC_I : 1
dx_gaba/dt = -x_gaba/decay_BC_I : 1
"""

# ----- NEW parameters (optimized for gamma oscillation) compared to `spw_network.py` -----
# synaptic weights (dimensionless here; multiplied by z = 1 nS in the model equations)
w_PC_E_scale = 0.19 # scale factor applied to the learned PC-PC weight matrix (see __main__)
w_PC_I = 0.24 # nS
w_BC_E = 0.42
w_BC_I = 1.12
w_PC_MF = 22.4
# mossy fiber input freq
rate_MF = 17.4 * Hz


def run_simulation(wmx_PC_E, save, seed, verbose=True):
    """
    Sets up the network and runs simulation
    :param wmx_PC_E: sparse (COO-like) matrix representing the recurrent excitatory synaptic weight matrix
    :param save: bool flag to save PC spikes after the simulation (used by `bayesian_decoding.py` later)
    :param seed: random seed used for running the simulation
    :param verbose: bool flag to report status of simulation
    :return SM_PC, SM_BC, RM_PC, RM_BC, selection, StateM_PC, StateM_BC: Brian2 monitors (+ array of selected cells used by multi state monitor)
    """
    # seed both RNGs: Brian2 draws random connectivity via numpy, `random` is seeded for completeness
    np.random.seed(seed)
    pyrandom.seed(seed)

    PCs = NeuronGroup(nPCs, model=eqs_PC, threshold="vm>spike_th_PC",
                      reset="vm=Vreset_PC; w+=b_PC", refractory=tref_PC, method="exponential_euler")
    PCs.vm = Vrest_PC
    PCs.g_ampa = 0.0
    PCs.g_ampaMF = 0.0
    PCs.g_gaba = 0.0
    PCs.depol_ACh = 40 * pA  # ACh induced ~10 mV depolarization in PCs...

    BCs = NeuronGroup(nBCs, model=eqs_BC, threshold="vm>spike_th_BC",
                      reset="vm=Vreset_BC; w+=b_BC", refractory=tref_BC, method="exponential_euler")
    BCs.vm = Vrest_BC
    BCs.g_ampa = 0.0
    BCs.g_gaba = 0.0

    # one-to-one mossy fiber (Poisson) drive onto the PCs
    MF = PoissonGroup(nPCs, rate_MF)
    C_PC_MF = Synapses(MF, PCs, on_pre="x_ampaMF+=norm_PC_MF*w_PC_MF")
    C_PC_MF.connect(j="i")

    # learned recurrent PC-PC weight matrix used here (loaded from disk by the caller)
    C_PC_E = Synapses(PCs, PCs, "w_exc:1", on_pre="x_ampa+=norm_PC_E*w_exc", delay=delay_PC_E)
    C_PC_E.connect(i=wmx_PC_E.row, j=wmx_PC_E.col)
    C_PC_E.w_exc = wmx_PC_E.data
    del wmx_PC_E  # free the (possibly large) matrix once the weights are copied into Brian2

    # remaining pathways are connected randomly with fixed probabilities
    C_PC_I = Synapses(BCs, PCs, on_pre="x_gaba+=norm_PC_I*w_PC_I", delay=delay_PC_I)
    C_PC_I.connect(p=connection_prob_BC)

    C_BC_E = Synapses(PCs, BCs, on_pre="x_ampa+=norm_BC_E*w_BC_E", delay=delay_BC_E)
    C_BC_E.connect(p=connection_prob_PC)

    C_BC_I = Synapses(BCs, BCs, on_pre="x_gaba+=norm_BC_I*w_BC_I", delay=delay_BC_I)
    C_BC_I.connect(p=connection_prob_BC)

    SM_PC = SpikeMonitor(PCs)
    SM_BC = SpikeMonitor(BCs)
    RM_PC = PopulationRateMonitor(PCs)
    RM_BC = PopulationRateMonitor(BCs)

    selection = np.arange(0, nPCs, 20)  # subset of neurons for recording variables
    StateM_PC = StateMonitor(PCs, variables=["vm", "w", "g_ampa", "g_ampaMF", "g_gaba"],
                             record=selection.tolist(), dt=0.1*ms)
    # `record` indices must be ints: nBCs/2 is a float under Python 3, so use floor division
    StateM_BC = StateMonitor(BCs, "vm", record=[nBCs // 2], dt=0.1*ms)

    if verbose:
        run(10000*ms, report="text")
    else:
        run(10000*ms)

    if save:
        save_vars(SM_PC, RM_PC, StateM_PC, selection, seed)

    return SM_PC, SM_BC, RM_PC, RM_BC, selection, StateM_PC, StateM_BC


if __name__ == "__main__":
    # simulation settings
    seed = 12345
    save = False
    verbose = True

    # load the learned recurrent weight matrix and rescale it for the gamma regime
    f_in = "wmx_sym_0.5_linear.npz"
    wmx_PC_E = load_wmx(os.path.join(base_path, "files", f_in)) * w_PC_E_scale

    SM_PC, SM_BC, RM_PC, RM_BC, selection, StateM_PC, StateM_BC = run_simulation(wmx_PC_E,
                                                                                 save=save, seed=seed, verbose=verbose)
    results = analyse_results(SM_PC, SM_BC, RM_PC, RM_BC, selection, StateM_PC, StateM_BC, seed=seed,
                              multiplier=1, linear=True, pklf_name=None, dir_name=None,
                              analyse_replay=False, TFR=False, save=save, verbose=False)
    if verbose:  # bypassing `verbose=True` in `analyse_results` with gamma related metrics
        # (format string, index into `results`) pairs for the metrics of interest
        gamma_metrics = [("Mean excitatory rate: %.3f", 2),
                         ("Mean inhibitory rate: %.3f", 3),
                         ("Average exc. gamma freq: %.3f", 10),
                         ("Exc. gamma power: %.3f", 11),
                         ("Average inh. gamma freq: %.3f", 12),
                         ("Inh. gamma power: %.3f", 13),
                         ("Average LFP gamma freq: %.3f", 14),
                         ("LFP gamma power: %.3f", 15)]
        for template, idx in gamma_metrics:
            print(template % results[idx])
    plt.show()
7 changes: 4 additions & 3 deletions scripts/generate_spike_train.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
t_max = 405.0 # [s]


def generate_spike_train(n_neurons, place_cell_ratio, linear, ordered=True, seed=1234):
def generate_spike_train(n_neurons, place_cell_ratio, linear, ordered=True, seed=8888):
"""
Generates hippocampal like spike trains (used later for learning the weights via STDP)
:param n_neurons: #{neurons}
Expand Down Expand Up @@ -76,7 +76,7 @@ def generate_spike_train(n_neurons, place_cell_ratio, linear, ordered=True, seed
spike_train = hom_poisson(outfield_rate, 100, t_max, seed)
spike_trains.append(spike_train)
seed += 1

return spike_trains


Expand All @@ -93,4 +93,5 @@ def generate_spike_train(n_neurons, place_cell_ratio, linear, ordered=True, seed
spike_trains = refractoriness(spike_trains) # clean spike train (based on refractory period)

npzf_name = os.path.join(base_path, "files", f_out)
np.savez(npzf_name, spike_trains=spike_trains)
np.savez(npzf_name, *spike_trains)

45 changes: 27 additions & 18 deletions scripts/helper.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
# -*- coding: utf8 -*-
"""
Helper functions used here and there
author: András Ecker, last update: 06.2019
author: András Ecker, last update: 06.2022
"""

import os
from shutil import rmtree
import pickle
from copy import deepcopy
import numpy as np
from scipy.sparse import coo_matrix, save_npz, load_npz
import pywt
from brian2.units import *
from poisson_proc import hom_poisson, get_tuning_curve_linear
Expand Down Expand Up @@ -85,7 +87,7 @@ def _avg_rate(rate, bin_, zoomed=False):
t0 = 0 if not zoomed else 9900
t1 = np.arange(t0, len_sim, bin_)
t2 = t1 + bin_
avg_rate = np.zeros_like(t1, dtype=np.float)
avg_rate = np.zeros_like(t1, dtype=float)
for i, (t1_, t2_) in enumerate(zip(t1, t2)):
avg_ = np.mean(rate[np.where((t1_ <= t) & (t < t2_))])
if avg_ != 0.:
Expand Down Expand Up @@ -166,6 +168,18 @@ def reorder_spiking_neurons(spiking_neurons, pklf_name_tuning_curves):
# ========== saving & loading ==========


def create_dir(dir_name):
    """
    Deletes the directory (if it exists) and creates a new, empty one in its place
    :param dir_name: string: full path of the directory to be created
    """
    # both branches of the original ended in os.mkdir(); collapse the redundant `else`
    if os.path.isdir(dir_name):
        rmtree(dir_name)  # start from a clean slate: drop any previous contents
    os.mkdir(dir_name)


def save_place_fields(place_fields, pklf_name):
"""
Save place field starts and corresponding neuron IDs for further analysis (see `bayesian_decoding.py`)
Expand Down Expand Up @@ -301,29 +315,24 @@ def save_gavg_step_sizes(step_sizes, phases, avg_step_sizes, seeds, f_name="gavg
pickle.dump(results, f, protocol=pickle.HIGHEST_PROTOCOL)


def save_wmx(weightmx, pklf_name):
def save_wmx(weightmx, npzf_name):
"""
Saves excitatory weight matrix
Saves excitatory weight matrix (as a `scipy.sparse` matrix)
:param weightmx: synaptic weight matrix to save
:param pklf_name: file name of the saved weight matrix
:param npzf_name: file name of the saved weight matrix
"""
np.fill_diagonal(weightmx, 0.0) # just to make sure
sparse_weightmx = coo_matrix(weightmx) # convert to COO
save_npz(npzf_name, sparse_weightmx)

np.fill_diagonal(weightmx, 0.0)
with open(pklf_name, "wb") as f:
pickle.dump(weightmx, f, protocol=pickle.HIGHEST_PROTOCOL)


def load_wmx(pklf_name):
def load_wmx(npzf_name):
"""
Dummy function to load in the excitatory weight matrix and make python clear the memory
:param pklf_name: file name of the saved weight matrix
:return: wmx_PC_E: excitatory weight matrix
:param npzf_name: file name of the saved weight matrix
:return: excitatory weight matrix
"""

with open(pklf_name, "rb") as f:
wmx_PC_E = pickle.load(f, encoding="latin1")

return wmx_PC_E
return load_npz(npzf_name)


def load_spikes(pklf_name):
Expand Down Expand Up @@ -358,7 +367,7 @@ def load_spike_trains(npzf_name):
"""

npz_f = np.load(npzf_name, allow_pickle=True)
spike_trains = npz_f["spike_trains"]
spike_trains = [npz_f[i] for i in npz_f]

spiking_neurons = 0 * np.ones_like(spike_trains[0])
spike_times = np.asarray(spike_trains[0])
Expand Down
Loading