Skip to content

Commit

Permalink
resolve pylinting issues
Browse files Browse the repository at this point in the history
  • Loading branch information
jessica-mitchell committed Aug 8, 2024
1 parent 051aba9 commit 62f81e9
Show file tree
Hide file tree
Showing 5 changed files with 70 additions and 75 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -58,26 +58,24 @@ def generate_reference_data(neuron_model="ignore_and_fire"):
model_instance.create()
model_instance.connect()

## connectivity at start of simulation
subset_size = 2000 ## number of pre- and post-synaptic neurons weights are extracted from
# connectivity at start of simulation
subset_size = 2000 # number of pre- and post-synaptic neurons weights are extracted from
pop_pre = model_instance.nodes["pop_E"][:subset_size]
pop_post = model_instance.nodes["pop_E"][:subset_size]
C = model_instance.get_connectivity(
pop_pre, pop_post, model_instance.pars["data_path"] + "/" + "connectivity_presim.dat"
)

## simulate
# simulate
model_instance.simulate(model_instance.pars["T"])

## save parameters to file
# save parameters to file
model_instance.save_parameters("model_instance_parameters", model_instance.pars["data_path"])

## connectivity at end of simulation
# connectivity at end of simulation
C = model_instance.get_connectivity(
pop_pre, pop_post, model_instance.pars["data_path"] + "/" + "connectivity_postsim.dat"
)

return


generate_reference_data(neuron_model=sys.argv[1])
Original file line number Diff line number Diff line change
Expand Up @@ -147,53 +147,53 @@ def plot_weight_distributions(whist_presim, whist_postsim, weights, pars, path="
def generate_reference_figures(neuron_model="ignore_and_fire"):
"""Generate and store set of reference data"""

## raster plot
# raster plot
parameters = model.get_default_parameters()

parameters["neuron_model"] = neuron_model

parameters["record_spikes"] = True
parameters["record_weights"] = True

## fetch node ids
# fetch node ids
model_instance = model.Model(parameters)
model_instance.create()

## create subfolder for figures (if necessary)
# create subfolder for figures (if necessary)
os.system("mkdir -p " + model_instance.pars["data_path"])

## load spikes from reference data
# load spikes from reference data
spikes = model.load_spike_data(
model_instance.pars["data_path"],
"spikes-%d" % (np.array(model_instance.nodes["spike_recorder"])[0]),
)
# plot_spikes(spikes, model_instance.nodes, model_instance.pars, model_instance.pars["data_path"])
plot_spikes(spikes, model_instance.nodes, model_instance.pars, "figures")

## load connectivity from reference data
# load connectivity from reference data
connectivity_presim = model.load_connectivity_data(model_instance.pars["data_path"], "connectivity_presim")
connectivity_postsim = model.load_connectivity_data(model_instance.pars["data_path"], "connectivity_postsim")

## create connectivity matrices before and after simulation for a subset of neurons
# create connectivity matrices before and after simulation for a subset of neurons
subset_size = 100
pop_pre = np.array(model_instance.nodes["pop_E"])[:subset_size]
pop_post = np.array(model_instance.nodes["pop_E"])[:subset_size]
W_presim, pop_pre, pop_post = model.get_connectivity_matrix(connectivity_presim, pop_pre, pop_post)
W_postsim, pop_pre, pop_post = model.get_connectivity_matrix(connectivity_postsim, pop_pre, pop_post)

## plot connectivity matrices
# plot connectivity matrices
# plot_connectivity_matrix(W_presim, pop_pre, pop_post, "_presim", model_instance.pars["data_path"])
# plot_connectivity_matrix(W_postsim, pop_pre, pop_post, "_postsim", model_instance.pars["data_path"])
plot_connectivity_matrix(W_presim, pop_pre, pop_post, model_instance.pars, "_presim", "figures")
plot_connectivity_matrix(W_postsim, pop_pre, pop_post, model_instance.pars, "_postsim", "figures")

## compute weight distributions
# compute weight distributions
# weights = np.arange(29.5,34.1,0.05)
weights = np.arange(0.0, 150.1, 0.5)
whist_presim = model.get_weight_distribution(connectivity_presim, weights)
whist_postsim = model.get_weight_distribution(connectivity_postsim, weights)

## plot weight distributions
# plot weight distributions
# plot_weight_distributions(whist_presim, whist_postsim, weights, model_instance.pars["data_path"])
plot_weight_distributions(whist_presim, whist_postsim, weights, model_instance.pars, "figures")

Expand Down
65 changes: 32 additions & 33 deletions pynest/examples/ignore_and_fire/model-ignore_and_fire.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,8 @@ def __init__(self, parameters):
Returns
-------
none
"""

print("\nInitialising model and simulation...")
Expand Down Expand Up @@ -143,6 +145,9 @@ def __init__(self, parameters):
elif self.pars["neuron_model"] == "ignore_and_fire":
self.__neuron_params = {}

else:
return None

def __derived_parameters(self, parameters):
"""
Set additional parameters derived from base parameters.
Expand Down Expand Up @@ -185,8 +190,6 @@ def __derived_parameters(self, parameters):
if self.pars["N_rec_spikes"] == "all":
self.pars["N_rec_spikes"] = self.pars["N"]

return

def create(self):
"""
Create and configure all network nodes (neurons + recording and stimulus devices),
Expand All @@ -207,10 +210,12 @@ def create(self):
nest.GetLocalNodeCollection(pop_all).V_m = random_vm
elif self.pars["neuron_model"] == "ignore_and_fire":
# pop_all = nest.Create("ignore_and_fire", self.pars["N"]) # overall population
# pop_all.rate = np.random.uniform(low=self.pars["ignore_and_fire_pars"]["rate_dist"][0],high=self.pars["ignore_and_fire_pars"]["rate_dist"][1],size=self.pars["N"])
# pop_all.phase = np.random.uniform(low=self.pars["ignore_and_fire_pars"]["phase_dist"][0],high=self.pars["ignore_and_fire_pars"]["phase_dist"][1],size=self.pars["N"])
# pop_all.rate = np.random.uniform(low=self.pars["ignore_and_fire_pars"]["rate_dist"][0],high=self.pars
# ["ignore_and_fire_pars"]["rate_dist"][1],size=self.pars["N"])
# pop_all.phase = np.random.uniform(low=self.pars["ignore_and_fire_pars"]["phase_dist"][0],
# high=self.pars["ignore_and_fire_pars"]["phase_dist"][1],size=self.pars["N"])

## better, but not working yet:
# better, but not working yet:
pop_all.rate = nest.random.uniform(
min=self.pars["ignore_and_fire_pars"]["rate_dist"][0],
max=self.pars["ignore_and_fire_pars"]["rate_dist"][1],
Expand Down Expand Up @@ -265,24 +270,22 @@ def create(self):
self.nodes["spike_recorder"] = spike_recorder
# self.nodes["weight_recorder"] = weight_recorder

return

def connect(self):
"""
Connect network and devices.
"""

print("\nConnecting network and devices...")
## fetch neuron populations and device ids
# fetch neuron populations and device ids
pop_all = self.nodes["pop_all"]
pop_E = self.nodes["pop_E"]
pop_I = self.nodes["pop_I"]
poisson = self.nodes["poisson"]
spike_recorder = self.nodes["spike_recorder"]

# connect network
## EE connections (plastic)
# EE connections (plastic)
nest.Connect(
pop_E,
pop_E,
Expand All @@ -295,7 +298,7 @@ def connect(self):
syn_spec="excitatory_plastic",
)

## EI connections (static)
# EI connections (static)
nest.Connect(
pop_E,
pop_I,
Expand All @@ -308,7 +311,7 @@ def connect(self):
syn_spec="excitatory_static",
)

## IE and II connections (static)
# IE and II connections (static)
nest.Connect(
pop_I,
pop_all,
Expand Down Expand Up @@ -339,8 +342,6 @@ def connect(self):
nest.Prepare()
nest.Cleanup()

return

def simulate(self, t_sim):
"""
Run simulation.
Expand All @@ -356,8 +357,6 @@ def simulate(self, t_sim):

nest.Simulate(t_sim)

return

def save_parameters(self, filename_root, path):
"""
Save model-instance parameters to file.
Expand All @@ -376,8 +375,6 @@ def save_parameters(self, filename_root, path):

json.dump(self.pars, open("%s/%s.json" % (path, filename_root), "w"), indent=4)

return

def get_connectivity(self, pop_pre, pop_post, filename=None):
"""
Extract connectivity for subpopulations pop_pre and pop_post
Expand Down Expand Up @@ -464,7 +461,7 @@ def get_data_file_list(path, label):
"""

## get list of files names
# get list of file names
files = []
for file_name in os.listdir(path):
if file_name.endswith(".dat") and file_name.startswith(label):
Expand Down Expand Up @@ -513,14 +510,14 @@ def load_spike_data(path, label, time_interval=None, pop=None, skip_rows=3):

files = get_data_file_list(path, label)

## open spike files and read data
# open spike files and read data
spikes = []
for file_name in files:
try:
spikes += [
np.loadtxt("%s/%s" % (path, file_name), skiprows=skip_rows)
] ## load spike file while skipping the header
except:
] # load spike file while skipping the header
except ValueError:
print("Error: %s" % sys.exc_info()[1])
print(
"Remove non-numeric entries from file %s (e.g. in file header) \
Expand All @@ -530,8 +527,8 @@ def load_spike_data(path, label, time_interval=None, pop=None, skip_rows=3):

spikes = np.concatenate(spikes)

## extract spikes in specified time interval
if time_interval != None:
# extract spikes in specified time interval
if time_interval is not None:
if type(time_interval) == tuple:
ind = (spikes[:, 1] >= time_interval[0]) * (spikes[:, 1] <= time_interval[1])
spikes = spikes[ind, :]
Expand All @@ -540,7 +537,7 @@ def load_spike_data(path, label, time_interval=None, pop=None, skip_rows=3):

if type(pop) == nest.NodeCollection:
spikes_subset = []
for cn, nid in enumerate(pop): ## loop over all neurons
for cn, nid in enumerate(pop): # loop over all neurons
print(
"Spike extraction from %d/%d (%d%%) neurons completed"
% (cn + 1, len(pop), 1.0 * (cn + 1) / len(pop) * 100),
Expand All @@ -549,7 +546,7 @@ def load_spike_data(path, label, time_interval=None, pop=None, skip_rows=3):
ind = np.where(spikes[:, 0] == nid)[0]
spikes_subset += list(spikes[ind, :])
spikes = np.array(spikes_subset)
elif pop == None:
elif pop is None:
pass
else:
print("Warning: pop must be a NEST NodeCollection or None. All spikes are loaded.")
Expand All @@ -571,7 +568,8 @@ def load_connectivity_data(path, label, skip_rows=1):
Connectivity file label (file name root).
skip_rows: int, optional
Number of rows to be skipped while reading connectivity files (to remove file headers). The default is 1.
Number of rows to be skipped while reading connectivity files (to remove file headers).
The default is 1.
Returns
-------
Expand All @@ -589,15 +587,16 @@ def load_connectivity_data(path, label, skip_rows=1):

files = get_data_file_list(path, label)

## open weight files and read data
# open weight files and read data
C = []
for file_name in files:
try:
C += [np.loadtxt("%s/%s" % (path, file_name), skiprows=skip_rows)] ## load file while skipping the header
except:
C += [np.loadtxt("%s/%s" % (path, file_name), skiprows=skip_rows)] # load file while skipping the header
except ValueError:
print("Error: %s" % sys.exc_info()[1])
print(
"Remove non-numeric entries from file %s (e.g. in file header) by specifying (optional) parameter 'skip_rows'.\n"
"Remove non-numeric entries from file %s (e.g. in file header) by specifying (optional) parameter \
'skip_rows'.\n"
% (file_name)
)

Expand Down Expand Up @@ -709,10 +708,10 @@ def get_connectivity_matrix(connectivity, pop_pre=[], pop_post=[]):
if len(pop_post) == 0:
pop_post = np.unique(connectivity[:, 1])

## initialise weight matrix
W = np.zeros([len(pop_post), len(pop_pre)]) ## convention: pre = columns, post = rows
# initialise weight matrix
W = np.zeros([len(pop_post), len(pop_pre)]) # convention: pre = columns, post = rows

## fill weight matrix
# fill weight matrix
for c in range(connectivity.shape[0]):
W[get_index(pop_post, connectivity[c, 1]), get_index(pop_pre, connectivity[c, 0])] = connectivity[c, 2]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
pars["theta"] = 20.0 # spike threshold(mV)
pars["V_reset"] = 0.0 # reset potential(mV)

## needed for ignore_and_fire version of the model
# needed for ignore_and_fire version of the model
pars["ignore_and_fire_pars"] = {}
pars["ignore_and_fire_pars"]["rate_dist"] = [0.5, 1.5]
pars["ignore_and_fire_pars"]["phase_dist"] = [0.01, 1.0]
Expand Down
Loading

0 comments on commit 62f81e9

Please sign in to comment.