Commit

Merge pull request #390 from Hjorthmedh/dev

Dev
Hjorthmedh authored Jun 12, 2023
2 parents 975dbdb + c501904 commit 65439a6
Showing 19 changed files with 464 additions and 786 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -7,7 +7,7 @@ Human Brain Project
[email protected]

## Funding
Horizon 2020 Framework Programme (785907, HBP SGA2); Horizon 2020 Framework Programme (945539, HBP SGA3); Vetenskapsrådet (VR-M-2017-02806, VR-M-2020-01652); Swedish e-science Research Center (SeRC); KTH Digital Futures. The computations are enabled by resources provided by the Swedish National Infrastructure for Computing (SNIC) at PDC KTH partially funded by the Swedish Research Council through grant agreement no. 2018-05973. We acknowledge the use of Fenix Infrastructure resources, which are partially funded from the European Union's Horizon 2020 research and innovation programme through the ICEI project under the grant agreement No. 800858.
Horizon 2020 Framework Programme (785907, HBP SGA2); Horizon 2020 Framework Programme (945539, HBP SGA3); Vetenskapsrådet (VR-M-2017-02806, VR-M-2020-01652); Swedish e-science Research Center (SeRC); KTH Digital Futures. The computations are enabled by resources provided by the Swedish National Infrastructure for Computing (SNIC) at PDC KTH partially funded by the Swedish Research Council through grant agreement no. 2018-05973. We acknowledge the use of Fenix Infrastructure resources, which are partially funded from the European Union's Horizon 2020 research and innovation programme through the ICEI project under the grant agreement No. 800858. Snudda is supported and featured on EBRAINS.

## Citation
Please cite the first paper for the general Snudda network creation and simulation methods, and the second paper for the striatal microcircuitry model.
725 changes: 49 additions & 676 deletions examples/notebooks/NEST/Snudda-in-NEST.ipynb

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion examples/parallel/KTH_PDC/Dardel_runSnudda.job
@@ -4,7 +4,7 @@
#SBATCH -e log/runSnudda-%j-error.txt
#SBATCH -t 00:30:00
#SBATCH -J Snudda
#SBATCH -A snic2022-5-245
#SBATCH -A naiss2023-5-231
#SBATCH --nodes=2
#SBATCH -n 256
#SBATCH --cpus-per-task=2
2 changes: 1 addition & 1 deletion examples/parallel/KTH_PDC/Dardel_simulate.job
@@ -5,7 +5,7 @@
#SBATCH -t 1:59:00
#SBATCH --time-min=1:59:00
#SBATCH -J Simulate
#SBATCH -A snic2022-5-245
#SBATCH -A naiss2023-5-231
#SBATCH --nodes=1-10
#SBATCH --tasks-per-node=128
#SBATCH --mail-type=ALL
2 changes: 1 addition & 1 deletion snudda/__init__.py
@@ -1,6 +1,6 @@
from .core import Snudda

__version__ = "1.4.7"
__version__ = "1.4.71"

from .init import SnuddaInit
from .place import SnuddaPlace
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/excitatory.json
@@ -0,0 +1,3 @@
{
"receptor_type": 1
}
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/excitatory_distal.json
@@ -0,0 +1,3 @@
{
"receptor_type": 5
}
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/excitatory_proximal.json
@@ -0,0 +1,3 @@
{
"receptor_type": 3
}
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/excitatory_soma.json
@@ -0,0 +1,3 @@
{
"receptor_type": 1
}
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/inhibitory.json
@@ -0,0 +1,3 @@
{
"receptor_type": 2
}
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/inhibitory_distal.json
@@ -0,0 +1,3 @@
{
"receptor_type": 6
}
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/inhibitory_proximal.json
@@ -0,0 +1,3 @@
{
"receptor_type": 4
}
3 changes: 3 additions & 0 deletions snudda/data/nest/synapses/inhibitory_soma.json
@@ -0,0 +1,3 @@
{
"receptor_type": 2
}
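The eight JSON files above map Snudda's named synapse classes to NEST receptor ports: excitatory synapses get the odd ports (1 = default/soma, 3 = proximal, 5 = distal) and inhibitory synapses the even ones (2 = default/soma, 4 = proximal, 6 = distal). A minimal sketch of how such a file could be consumed when connecting NEST neurons — the file path, neuron model and parameter values here are illustrative assumptions, not part of this commit:

import json
import nest

# Hypothetical example: look up the receptor port for a distal excitatory synapse
with open("snudda/data/nest/synapses/excitatory_distal.json") as f:
    receptor_type = json.load(f)["receptor_type"]  # -> 5

# A multisynapse neuron with six receptor ports, one per synapse class above
post = nest.Create("iaf_psc_exp_multisynapse",
                   params={"tau_syn": [2.0, 8.0, 2.0, 8.0, 2.0, 8.0]})
pre = nest.Create("poisson_generator", params={"rate": 10.0})

nest.Connect(pre, post, syn_spec={"receptor_type": receptor_type,
                                  "weight": 1.0, "delay": 1.0})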
66 changes: 59 additions & 7 deletions snudda/plotting/Blender/visualisation/visualise_network.py
@@ -16,11 +16,13 @@ class VisualiseNetwork(object):

# You need to provide neuron
def __init__(self, network_path, blender_save_file=None, blender_output_image=None,
network_json=None, simulation_output_file_name=None):
network_json=None, simulation_output_file_name=None, use_neuron_cache=True):

self.network_path = network_path
self.snudda_data = get_snudda_data(network_path=network_path)
self.scale_f = 1000 # factor to downscale the data
self.neuron_colour_lookup = dict() # Allow the user to override the neuron colours
self.use_neuron_cache = use_neuron_cache

if network_json:
self.network_json = network_json
@@ -54,11 +56,22 @@ def __init__(self, network_path, blender_save_file=None, blender_output_image=No
self.sl.import_json(self.network_json)
self.data = self.sl.data

def set_neuron_colour(self, neuron_id, colour):

if len(colour) != 4:
raise ValueError(f"Colour should be R,G,B,alpha (4 values)")
self.neuron_colour_lookup[neuron_id] = colour

def clear_neuron_colours(self):

self.neuron_colour_lookup = dict()

def visualise(self,
neuron_id=None,
blender_output_image=None,
white_background=True,
show_synapses=True,
synapse_colour=None,
synapse_pair_filter=None,
draw_meshes=True,
full_meshes=None,
@@ -75,6 +88,7 @@ def visualise(self,
blender_output_image
white_background
show_synapses
synapse_colour: R,G,B,alpha values, each in the range 0-1. Default None.
synapse_pair_filter (list): List of pairs of neurons (tuples) to show synapses for, default None = no filtering
camera_location
camera_rotation
@@ -83,7 +97,7 @@
"""

if neuron_id:
if neuron_id is not None:
neurons = [self.data["neurons"][x] for x in neuron_id]
else:
neurons = self.data["neurons"]
@@ -140,6 +154,14 @@ def visualise(self,
bg.inputs[0].default_value[:3] = (0.0, 0.0, 0.0)
bg.inputs[1].default_value = 0.0

# Scott's magic
'''
mat_dspn = bpy.data.materials.new("DSPN")
mat_dspn.use_nodes = True
mat_dspn.node_tree.nodes["Principled BSDF"].inputs[0].default_value = (0, 1, 0, 1)
mat_dspn.node_tree.nodes["Principled BSDF"].inputs['Alpha'].default_value = 1
'''

# Define materials
mat_dspn = bpy.data.materials.new("PKHG")
mat_dspn.diffuse_color = (77. / 255, 151. / 255, 1.0, 0.5)
@@ -184,12 +206,29 @@ def visualise(self,
"synapse": mat_synapse,
"other": mat_other}

if white_background:
# Add the user requested custom colours
for nid in self.neuron_colour_lookup.keys():
material_lookup[nid] = bpy.data.materials.new(str(nid))
material_lookup[nid].use_nodes = True
material_lookup[nid].node_tree.nodes["Principled BSDF"].inputs[0].default_value = self.neuron_colour_lookup[nid]
material_lookup[nid].node_tree.nodes["Principled BSDF"].inputs['Alpha'].default_value = self.neuron_colour_lookup[nid][-1]

#material_lookup[nid] = bpy.data.materials.new("PKHG")
#material_lookup[nid].diffuse_color = self.neuron_colour_lookup[nid]

if synapse_colour is not None:
mat_synapse.diffuse_color = synapse_colour
elif white_background:
mat_synapse.diffuse_color = (0.8, 0.0, 0.0, 1.0)
else:
mat_synapse.diffuse_color = (1.0, 1.0, 0.9, 1.0)

# matSynapse.use_transparency = True

"""
# These lines are commented out so that the synapse colour is set correctly (otherwise the synapses render white)
# Thanks Scott for finding this fix.
mat_synapse.use_nodes = True
if not white_background:
@@ -201,12 +240,13 @@ def visualise(self,
material_output = mat_synapse.node_tree.nodes.get('Material Output')
mat_synapse.node_tree.links.new(material_output.inputs[0], emission.outputs[0])
"""

for neuron in neurons:
for idx, neuron in enumerate(neurons):

e_rot = mathutils.Matrix(neuron["rotation"].reshape(3, 3)).to_euler()

if neuron["name"] in self.neuron_cache:
if self.use_neuron_cache and neuron["name"] in self.neuron_cache:
# If we already have the object in memory, copy it.
obj = self.neuron_cache[neuron["name"]].copy()

@@ -220,7 +260,15 @@ def visualise(self,
obj.name = f"{neuron['name']}-{neuron['neuronID']}"
VisualiseNetwork.link_object(obj)
else:
self.read_swc_data(filepath=snudda_parse_path(neuron["morphology"], self.snudda_data), detail_level=detail_level)
if isinstance(detail_level, np.ndarray):
    if len(detail_level) != len(neurons):
        raise ValueError(f"detail_level must be 1, 2 or 3; if given as an array it must "
                         f"have the same length as the number of neurons (i.e. {len(neurons)}).")
    dl = detail_level[idx]
else:
    dl = detail_level

self.read_swc_data(filepath=snudda_parse_path(neuron["morphology"], self.snudda_data), detail_level=dl)
obj = bpy.context.selected_objects[0]
obj.name = f"{neuron['name']}-{neuron['neuronID']}"

@@ -233,7 +281,11 @@ def visualise(self,

n_type = neuron["type"].lower()

if n_type in material_lookup:
if neuron['neuronID'] in material_lookup:
# Custom colour for neuron (priority)
mat = material_lookup[neuron['neuronID']]
elif n_type in material_lookup:
# Each neuron type has its own colour
mat = material_lookup[n_type]
else:
mat = material_lookup["other"]
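A hypothetical usage sketch of the options added above (per-neuron colours, synapse colour, per-neuron detail levels and the cache toggle), run inside Blender's Python; the network path, neuron IDs and output file name are placeholders:

import numpy as np
from snudda.plotting.Blender.visualisation.visualise_network import VisualiseNetwork

vn = VisualiseNetwork(network_path="networks/my_network", use_neuron_cache=False)

# Colours are R,G,B,alpha tuples with components in 0-1
vn.set_neuron_colour(neuron_id=0, colour=(0.0, 1.0, 0.0, 1.0))

vn.visualise(neuron_id=[0, 1, 2],
             synapse_colour=(0.8, 0.0, 0.0, 1.0),
             detail_level=np.array([1, 2, 3]),  # one entry per listed neuron
             blender_output_image="my_network.png")

vn.clear_neuron_colours()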
10 changes: 9 additions & 1 deletion snudda/utils/ablate_network.py
@@ -136,6 +136,9 @@ def filter_synapses(self, data_type):

synapse_data = self.in_file[f"network/{data_type}"][()].copy()

if synapse_data.size == 0:
return np.array([], dtype=int)

keep_flag = np.zeros((synapse_data.shape[0],), dtype=bool)

# Shortcut, if user wants to remove all, skip the processing part
@@ -354,10 +357,15 @@ def write_network(self, out_file_name=None, print_remapping=False):
row[1] = remap_id[row[1]]
temp_gj_mat[idx, :] = row

if temp_gj_mat.size > 0:
gj_chunk_size = self.in_file["network/gapJunctions"].chunks
else:
gj_chunk_size = None

network_group.create_dataset("gapJunctions",
data=temp_gj_mat,
dtype=np.int32, shape=(num_gj, self.in_file["network/gapJunctions"].shape[1]),
chunks=self.in_file["network/gapJunctions"].chunks,
chunks=gj_chunk_size,
maxshape=(None, self.in_file["network/gapJunctions"].shape[1]),
compression=self.in_file["network/gapJunctions"].compression)

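The write_network change above stops copying the chunk shape from the source gapJunctions dataset when the filtered matrix is empty, letting h5py pick automatic chunking instead. A self-contained sketch of that guard, with illustrative file names:

import h5py
import numpy as np

with h5py.File("network-in.hdf5", "r") as fin, h5py.File("network-out.hdf5", "w") as fout:
    src = fin["network/gapJunctions"]
    temp_gj_mat = src[()][:0, :]  # pretend the ablation removed every gap junction

    # Only reuse the source chunk layout when there is data to chunk
    gj_chunk_size = src.chunks if temp_gj_mat.size > 0 else None

    fout.create_dataset("network/gapJunctions", data=temp_gj_mat, dtype=np.int32,
                        chunks=gj_chunk_size,
                        maxshape=(None, src.shape[1]),
                        compression=src.compression)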
94 changes: 21 additions & 73 deletions snudda/utils/conv_hurt.py
@@ -13,7 +13,7 @@

class ConvHurt(object):

def __init__(self, simulation_structures, base_dir="TEST/", target_simulator="NEST"):
def __init__(self, simulation_structures, base_dir="TEST/", target_simulator="NEST", has_input=False):

self.base_dir = base_dir
self.network_dir = os.path.join(base_dir, 'networks')
@@ -25,7 +25,7 @@ def __init__(self, simulation_structures, base_dir="TEST/", target_simulator="NE
self.setup_directories(base_dir=base_dir)

self.write_main_config(simulation_structures=simulation_structures,
base_dir=base_dir, target_simulator=target_simulator)
base_dir=base_dir, target_simulator=target_simulator, has_input=has_input)

############################################################################

@@ -66,7 +66,8 @@ def write_main_config(self,
base_dir="TEST/",
out_file="circuit_config.json",
simulation_structures=[],
target_simulator="NEURON"):
target_simulator="NEURON",
has_input=False):

config = OrderedDict([])

@@ -102,6 +103,15 @@
"edge_types_file": os.path.join("$NETWORK_DIR", f"{ss}_edge_types.csv") }
edges.append(edge_info)

if has_input:
node_info = {"nodes_file": os.path.join("$NETWORK_DIR", f"{ss}-input_nodes.hdf5"),
"node_types_file": os.path.join("$NETWORK_DIR", f"{ss}-input_node_types.csv")}
nodes.append(node_info)

edge_info = {"edges_file": os.path.join("$NETWORK_DIR", f"{ss}-input_edges.hdf5"),
"edge_types_file": os.path.join("$NETWORK_DIR", f"{ss}-input_edge_types.csv")}
edges.append(edge_info)

config["networks"] = OrderedDict([("nodes", nodes), ("edges", edges)])

with open(os.path.join(base_dir, out_file), 'wt') as f:
@@ -361,23 +371,23 @@ def write_edges_csv(self,

############################################################################

def write_input(self, spike_file_name, spikes):

if spikes is None:
print(f"No spikes specified, not writing {spike_file_name}")
print("Use python3 Network_input.py yourinput.json yournetwork.hdf5 input-spikes.hdf5")
return
def write_input(self, spike_file_name, spike_times, gids):

f_name = os.path.join(self.base_dir, spike_file_name)

print(f"Writing spikes to {f_name}")

with h5py.File(f_name, 'w', libver=self.h5py_libver) as f:
self.add_version(f)

print(f"Writing file {f_name}")

s_group = f.create_group("spikes")
s_group.create_dataset("gids", data=spikes[:, 1])
s_group.create_dataset("timestamps", data=spikes[:, 0])
s_group.attrs["sorting"] = "gid"
s_group.create_dataset("gids", data=gids)
s_group.create_dataset("timestamps", data=spike_times*1e3) # Convert to ms

return f_name

############################################################################

@@ -405,65 +415,3 @@ def add_version(self, hdf5_file):

hdf5_file.attrs["version"] = [0, 1]
hdf5_file.attrs["magic"] = 0x0A7A


if __name__ == "__main__":
# ch = ConvHurt()
# ch = ConvHurt(simulationStructure="cerebellum",
# inputStructures=["pons","cortex"])

ch = ConvHurt(simulation_structure="striatum",
input_structures=["cortex", "thalamus"])

# Test example, we have 5 neurons, big network
# two groups

node_data = {"positions": np.array([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.],
[1., 8., 9.], [2., 3., 2.]]),
"rotation_angle_zaxis": np.array([0.1, 0.2, 0.3, 0.4, 0.5])}

ch.write_nodes(node_file='striatum_nodes.hdf5',
data=node_data,
node_id=np.array([0, 1, 2, 3, 4]),
population_name="striatum_nodes",
node_type_id=np.array([0, 1, 0, 1, 0]),
node_group_id=np.array([0, 0, 1, 1, 1]),
node_group_index=np.array([0, 1, 0, 1, 2]))

node_type_id = np.array([0, 1])
node_data_csv = OrderedDict([('name', ['A', 'B']),
('location', ['structA', 'structB'])])

ch.write_node_csv(node_csv_file='striatum_node_types.csv',
node_type_id=node_type_id,
data=node_data_csv)

edge_group = np.array([5, 5, 11, 11, 11])
edge_group_index = np.array([0, 1, 0, 1, 2])
edge_type_id = np.array([0, 1, 0, 1, 0])
source_gid = np.array([1, 2, 3, 3, 4])
target_gid = np.array([2, 3, 4, 0, 1]) # THESE ARE SORTED ... HAHAHA

# Delay needs to be in ms (bad bad people, real scientists use SI units)
edge_data = OrderedDict([("sec_id", np.array([10, 22, 33, 24, 15])),
("sec_x", np.array([0.1, 0.3, 0.5, 0.2, 0])),
("syn_weight", np.array([0.1e-9, 2e-9, 3e-9,
0.3e-9, 0.1e-9])),
("delay", 1e3 * np.array([1e-3, 4e-3, 2e-3, 5e-3, 1e-3]))])

ch.write_edges(edge_file="striatum_edges.hdf5",
edge_group=edge_group,
edge_group_index=edge_group_index,
edge_type_id=edge_type_id,
edge_population_name="striatum_edges",
source_id=source_gid,
target_id=target_gid,
data=edge_data)

edge_type_id = np.array([0, 1])
edge_csv_data = OrderedDict([('template', ['Exp2Syn', 'NULL']),
('dynamics_params', ["mysyn.json", 'yoursyn.json'])])

ch.write_edges_csv(edge_csv_file="striatum_edge_types.csv",
edge_type_id=edge_type_id,
data=edge_csv_data)
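A hypothetical call matching the new ConvHurt interface above (the has_input flag and the spike_times/gids form of write_input); the structure name, file name and spike data are illustrative:

import numpy as np
from snudda.utils.conv_hurt import ConvHurt

ch = ConvHurt(simulation_structures=["striatum"], base_dir="TEST/",
              target_simulator="NEST", has_input=True)

spike_times = np.array([0.010, 0.025, 0.040])  # seconds; write_input converts to ms
gids = np.array([0, 1, 0])
ch.write_input("striatum-input-spikes.hdf5", spike_times=spike_times, gids=gids)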