
Merge pull request #407 from Hjorthmedh/init_redux
Init redux
Hjorthmedh authored Feb 2, 2024
2 parents 04f001f + 59bcf19 commit e6fe68c
Showing 15 changed files with 592 additions and 977 deletions.
911 changes: 290 additions & 621 deletions examples/notebooks/ProjectionExample/composite_axon_projections.ipynb

Large diffs are not rendered by default.

307 changes: 110 additions & 197 deletions examples/notebooks/custom_slice_example.ipynb

Large diffs are not rendered by default.

233 changes: 128 additions & 105 deletions examples/notebooks/population_unit_network.ipynb

Large diffs are not rendered by default.

(file name not rendered in this view)
@@ -28,7 +28,7 @@ export N_WORKERS=$SLURM_NTASKS
export IPNWORKERS=50

# Clear old ipyparallel
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython"
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/${USER}/.ipython-${SLURM_JOB_ID}"
rm -r $IPYTHONDIR
export IPYTHON_PROFILE=default

@@ -128,4 +128,5 @@ export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)

srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/input_tuning/x86_64/special -mpi -python $SNUDDA_DIR/snudda/input/input_tuning.py simulate $NETWORK_DIR --seed_list $SEED_LIST


# Cleanup IPYTHONDIR
rm -r $IPYTHONDIR
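
The change repeated across these job scripts suffixes IPYTHONDIR with the Slurm job ID, so the `rm -r $IPYTHONDIR` cleanup in one job can no longer wipe the live ipyparallel state of another job running under the same user. A minimal Python sketch of the same path construction (the scratch prefix and variable names come from the diff; the fallback values are illustrative):

import os

# Rebuild the per-job IPython directory the scripts export.
# ${USER:0:1} in bash is the first character of the user name;
# appending SLURM_JOB_ID makes the directory unique per job.
user = os.environ.get("USER", "someuser")
job_id = os.environ.get("SLURM_JOB_ID", "0")
ipythondir = f"/cfs/klemming/scratch/{user[0]}/{user}/.ipython-{job_id}"
print(ipythondir)  # e.g. /cfs/klemming/scratch/s/someuser/.ipython-123456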
(file name not rendered in this view)
@@ -28,7 +28,7 @@ export N_WORKERS=$SLURM_NTASKS
export IPNWORKERS=50

# Clear old ipyparallel
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython"
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython-${SLURM_JOB_ID}"
rm -r $IPYTHONDIR
export IPYTHON_PROFILE=default

@@ -128,4 +128,5 @@ export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)

srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/input_tuning/x86_64/special -mpi -python $SNUDDA_DIR/snudda/input/input_tuning.py simulate $NETWORK_DIR --seed_list $SEED_LIST


# Cleanup IPYTHONDIR
rm -r $IPYTHONDIR
(file name not rendered in this view)
@@ -28,7 +28,7 @@ export N_WORKERS=$SLURM_NTASKS
export IPNWORKERS=50

# Clear old ipyparallel
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython"
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython-${SLURM_JOB_ID}"
rm -r $IPYTHONDIR
export IPYTHON_PROFILE=default

@@ -129,3 +129,5 @@ export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)
srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/input_tuning/x86_64/special -mpi -python $SNUDDA_DIR/snudda/input/input_tuning.py simulate $NETWORK_DIR --seed_list $SEED_LIST


# Cleanup IPYTHONDIR
rm -r $IPYTHONDIR
(file name not rendered in this view)
@@ -30,7 +30,7 @@ export N_WORKERS=$SLURM_NTASKS
export IPNWORKERS=100

# Clear old ipyparallel
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython"
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython-${SLURM_JOB_ID}"
rm -r $IPYTHONDIR
export IPYTHON_PROFILE=default

@@ -130,4 +130,6 @@ export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)

srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/input_tuning/x86_64/special -mpi -python $SNUDDA_DIR/snudda/input/input_tuning.py simulate $NETWORK_DIR --seed_list $SEED_LIST

# Cleanup IPYTHONDIR
rm -r $IPYTHONDIR

(file name not rendered in this view)
@@ -30,7 +30,7 @@ export N_WORKERS=$SLURM_NTASKS
export IPNWORKERS=100

# Clear old ipyparallel
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython"
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython-${SLURM_JOB_ID}"
rm -r $IPYTHONDIR
export IPYTHON_PROFILE=default

@@ -130,4 +130,5 @@ export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)

srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/input_tuning/x86_64/special -mpi -python $SNUDDA_DIR/snudda/input/input_tuning.py simulate $NETWORK_DIR --seed_list $SEED_LIST


# Cleanup IPYTHONDIR
rm -r $IPYTHONDIR
(file name not rendered in this view)
@@ -30,7 +30,7 @@ export N_WORKERS=$SLURM_NTASKS
export IPNWORKERS=100

# Clear old ipyparallel
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython"
export IPYTHONDIR="/cfs/klemming/scratch/${USER:0:1}/$USER/.ipython-${SLURM_JOB_ID}"
rm -r $IPYTHONDIR
export IPYTHON_PROFILE=default

@@ -131,3 +131,5 @@ export FI_CXI_DEFAULT_VNI=$(od -vAn -N4 -tu < /dev/urandom)
srun -n $N_WORKERS $SNUDDA_DIR/examples/parallel/KTH_PDC/input_tuning/x86_64/special -mpi -python $SNUDDA_DIR/snudda/input/input_tuning.py simulate $NETWORK_DIR --seed_list $SEED_LIST


# Cleanup IPYTHONDIR
rm -r $IPYTHONDIR
(file name not rendered in this view)
@@ -30,7 +30,15 @@
else:
    input_correlation = None


if os.getenv("NUM_INPUT_MAX"):
    num_input_max = float(os.getenv("NUM_INPUT_MAX"))
else:
    num_input_max = 250

if os.getenv("NUM_REPLICAS"):
    num_replicas = int(os.getenv("NUM_REPLICAS"))
else:
    num_replicas = 20

if os.getenv("SEED_LIST"):
    seed_list = ast.literal_eval(os.getenv("SEED_LIST"))
@@ -59,14 +67,14 @@

neurons_path = os.path.join("$DATA", "neurons", "striatum")
input_tuning.setup_network(neurons_path=neurons_path,
                           num_replicas=20,
                           num_replicas=num_replicas,
                           neuron_types=neuron_type)

print("Calling setup_input")

input_tuning.setup_input(input_type=input_type,  # e.g. "cortical" or "thalamic"
                         num_input_min=1,
                         num_input_max=200,
                         num_input_max=num_input_max,
                         input_duration=10.0,
                         input_frequency_range=[input_freq],
                         input_correlation=input_correlation,
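The hunk above makes the tuning script configurable through environment variables (NUM_INPUT_MAX, NUM_REPLICAS, SEED_LIST) instead of hard-coded values. A hedged sketch of the same override pattern with the string-to-number conversion made explicit, since os.getenv always returns a string (default values follow the diff):

import os

# Environment overrides with explicit type conversion; defaults as in the diff.
num_input_max = float(os.getenv("NUM_INPUT_MAX", "250"))
num_replicas = int(os.getenv("NUM_REPLICAS", "20"))
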
2 changes: 1 addition & 1 deletion snudda/core.py
@@ -637,7 +637,7 @@ def simulate(self,
                 neuromodulation=None,
                 disable_synapses=None,
                 disable_gj=False,
                 record_volt=False,
                 record_volt=True,
                 record_all=False,
                 simulation_config=None,
                 export_core_neuron=False,
23 changes: 16 additions & 7 deletions snudda/neurons/neuron_morphology_extended.py
@@ -421,11 +421,18 @@ def dendrite_input_locations(self, synapse_density_str, rng, num_locations,

        if num_locations is not None:
            try:
                syn_idx = rng.choice(a=dend_idx, size=num_locations, replace=True,
                if cluster_size is not None:
                    unique_locations = int(np.ceil(num_locations / cluster_size))
                else:
                    unique_locations = num_locations

                # Cluster synapses are placed around these unique locations with cluster_spread*2
                syn_idx = rng.choice(a=dend_idx, size=unique_locations, replace=True,
                                     p=expected_synapses[dend_idx] / expected_sum)
            except:
                print(f"dend_idx={dend_idx}\n"
                      f"num_locations={num_locations}\n"
                      f"unique_locations={unique_locations}\n"
                      f"p={expected_synapses[dend_idx] / expected_sum}")
                import traceback
                self.write_log(traceback.format_exc(), is_error=True)
@@ -461,20 +468,22 @@ def dendrite_input_locations(self, synapse_density_str, rng, num_locations,

        list_cluster_syn_idx = []

        for closest_point_idx in lust_of_closest_point_idx:
            list_cluster_syn_idx.append(rng.choice(closest_point_idx, size=cluster_size, replace=True))
        for closest_point_idxs in lust_of_closest_point_idx:
            # lust_of_closest_point_idx is indexed onto geometry[dend_idx, :3]
            list_cluster_syn_idx.append(rng.choice(dend_idx[closest_point_idxs], size=cluster_size, replace=True))

        cluster_syn_idx = np.concatenate(list_cluster_syn_idx)

        num_locations = len(cluster_syn_idx)
        comp_x = rng.random(num_locations)
        # Make sure we only have num_locations points, remove any excess
        cluster_syn_idx = cluster_syn_idx[:num_locations]

        num_pos = len(cluster_syn_idx)
        comp_x = rng.random(num_pos)
        xyz = comp_x[:, None] * geometry[cluster_syn_idx, :3] + (1 - comp_x[:, None]) * geometry[parent_idx[cluster_syn_idx], :3]
        sec_id = section_data[cluster_syn_idx, 0]
        sec_x = comp_x * section_data[cluster_syn_idx, 1] + (1 - comp_x) * section_data[parent_idx[cluster_syn_idx], 1]
        dist_to_soma = comp_x * geometry[cluster_syn_idx, 4] + (1 - comp_x) * geometry[parent_idx[cluster_syn_idx], 4]

        # TODO: Check that this is correct!!

        return xyz, sec_id, sec_x / 1e3, dist_to_soma

    def cluster_synapses(self, sec_id, sec_x, count, distance, rng):
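To make the clustered-placement flow above concrete: the code first draws ceil(num_locations / cluster_size) unique centre compartments weighted by expected synapse density, then draws cluster_size members near each centre, and finally trims the concatenated result back to num_locations. A self-contained sketch under assumed inputs (dend_idx, the probability vector, and the member pool are stand-ins; the real code restricts members to each centre's nearest neighbours within cluster_spread):

import numpy as np

rng = np.random.default_rng(42)

# Stand-ins for the real inputs: candidate dendrite compartments and a
# placement probability per compartment (uniform here for simplicity).
dend_idx = np.arange(50)
p = np.ones(len(dend_idx)) / len(dend_idx)

num_locations, cluster_size = 25, 10
unique_locations = int(np.ceil(num_locations / cluster_size))  # 3 centres

# Draw the cluster centres, weighted by expected synapse density.
centre_idx = rng.choice(a=dend_idx, size=unique_locations, replace=True, p=p)

# Around each centre, draw cluster_size member compartments.
list_cluster_syn_idx = [rng.choice(dend_idx, size=cluster_size, replace=True)
                        for _ in centre_idx]
cluster_syn_idx = np.concatenate(list_cluster_syn_idx)

# 3 * 10 = 30 draws, trimmed back to the requested 25 locations.
cluster_syn_idx = cluster_syn_idx[:num_locations]
assert len(cluster_syn_idx) == num_locations
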
1 change: 1 addition & 0 deletions snudda/simulate/save_network_recording.py
@@ -323,6 +323,7 @@ def write(self):

        if int(self.pc.id()) == 0:

            # TODO: We need to save max time even if we do not save the soma voltage(!)
            out_file = h5py.File(self.output_file, "a")
            if "time" not in out_file and self.time is not None:
                print(f"Using sample dt = {self.sample_dt} (sample step size {sample_step})")
4 changes: 2 additions & 2 deletions tests/networks/network_testing_input/input-test-1.json
@@ -18,8 +18,8 @@
"type" : "AMPA_NMDA",
"synapseDensity" : "0.45*0.05/(1+exp(-(d-30e-6)/5e-6))",
"nInputs": 2000,
"clusterSize" : 3,
"clusterSpread" : 30e-6,
"clusterSize" : 10,
"clusterSpread" : 5e-6,
"frequency" : [4, 8, 10],
"populationUnitCorrelation" : 0.2,
"jitter" : 0.00,
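Read together with the test change below, the new values treat nInputs as the total synapse count rather than the number of clusters: with nInputs = 2000 and clusterSize = 10, placement picks ceil(2000 / 10) = 200 unique dendritic locations and puts 10 synapses around each, now within a much tighter 5 µm clusterSpread (down from 30 µm). A one-line check of that arithmetic, assuming the unique_locations formula from the morphology change above:

import math

n_inputs, cluster_size = 2000, 10
unique_locations = math.ceil(n_inputs / cluster_size)  # 200 cluster centres
assert unique_locations * cluster_size == n_inputs     # 2000 synapses in total
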
47 changes: 15 additions & 32 deletions tests/test_input.py
@@ -13,12 +13,14 @@ class InputTestCase(unittest.TestCase):

    def setUp(self):

        print("RUNNING SETUP")
        os.chdir(os.path.dirname(__file__))

        self.network_path = os.path.join("networks", "network_testing_input")
        self.config_file = os.path.join(self.network_path, "network-config.json")
        self.position_file = os.path.join(self.network_path, "network-neuron-positions.hdf5")
        self.save_file = os.path.join(self.network_path, "voxels", "network-putative-synapses.hdf5")
        self.network_file = os.path.join(self.network_path, "network-synapses.hdf5")

        # Setup network so we can test input generation
        from snudda.init.init import SnuddaInit
@@ -32,28 +34,9 @@ def setUp(self):

        cnc.write_json(self.config_file)

        # Place neurons
        from snudda.place.place import SnuddaPlace
        npn = SnuddaPlace(config_file=self.config_file,
                          log_file=None,
                          verbose=True,
                          d_view=None,  # TODO: If d_view is None the code runs in serial, add a parallel test
                          h5libver="latest")
        npn.parse_config()
        npn.write_data(self.position_file)

        # Detect
        self.sd = SnuddaDetect(config_file=self.config_file, position_file=self.position_file,
                               save_file=self.save_file, rc=None,
                               hyper_voxel_size=120, verbose=True)

        self.sd.detect(restart_detection_flag=True)

        # Prune
        self.network_file = os.path.join(self.network_path, "network-synapses.hdf5")

        sp = SnuddaPrune(network_path=self.network_path, config_file=None)  # Use default config file
        sp.prune()
        from snudda import Snudda
        snd = Snudda(network_path=self.network_path)
        snd.create_network()

    def test_generate(self):

@@ -154,8 +137,9 @@ def test_input_1(self):
                    config_n_inputs = config_data[neuron_type][input_type]['nInputs'][neuron_name]
                else:
                    config_n_inputs = config_data[neuron_type][input_type]['nInputs']
                print(f"Checking number of inputs is {config_n_inputs} * {cluster_size}")
                self.assertEqual(config_n_inputs * cluster_size, n_traces)
                print(f"Checking number of inputs is {config_n_inputs} (cluster size used: {cluster_size})")
                # self.assertEqual(config_n_inputs * cluster_size, n_traces)
                self.assertEqual(config_n_inputs, n_traces)

                # TODO: We can no longer assume that sectionID is the same for all inputs in a cluster;
                # the new code also works at branch points, so a cluster can be spread over different sections.
@@ -164,7 +148,6 @@
                # for ctr in range(0, cluster_size-1):
                #     self.assertTrue(np.all(np.diff(input_info["sectionID"])[ctr::cluster_size] == 0))


                max_len = 1
                if type(start_time) is np.ndarray:
                    max_len = np.maximum(max_len, len(start_time))
@@ -312,7 +295,6 @@ def test_input_2(self):

        # TODO: Add checks


    def test_arbitrary_function(self):

        func_lambda = lambda t: t*100
@@ -344,12 +326,6 @@ def test_arbitrary_function(self):
            self.assertTrue((t_check-1)*80 <= freq <= (t_check-1)*120,
                            f"Found frequency {freq} Hz at {t_check}s, expected {t_check*100} Hz")

    def find_spikes_in_range(self, spikes, time_range):
        t_idx = np.where(np.logical_and(time_range[0] <= spikes, spikes <= time_range[1]))[0]
        return spikes[t_idx]

    def find_freq_in_range(self, spikes, time_range):
        return len(self.find_spikes_in_range(spikes, time_range)) / (time_range[1] - time_range[0])

    def test_arbitrary_function_range(self):

@@ -405,6 +381,13 @@ def test_fraction_mixing(self):
        self.assertTrue(1 < np.sum(n_a_2) < 6)
        self.assertTrue(20 < np.sum(n_b_2) < 30)

    def find_spikes_in_range(self, spikes, time_range):
        t_idx = np.where(np.logical_and(time_range[0] <= spikes, spikes <= time_range[1]))[0]
        return spikes[t_idx]

    def find_freq_in_range(self, spikes, time_range):
        return len(self.find_spikes_in_range(spikes, time_range)) / (time_range[1] - time_range[0])


if __name__ == '__main__':
    unittest.main()

