Commit

removed excessive print statements
PolarBean committed Oct 23, 2024
1 parent 94b5c48 commit c5a041f
Showing 4 changed files with 20 additions and 51 deletions.
34 changes: 10 additions & 24 deletions PyNutil/coordinate_extraction.py
@@ -118,7 +118,6 @@ def folder_to_atlas_space(
atlas_labels,
pixel_id=[0, 0, 0],
non_linear=True,
method="all",
object_cutoff=0,
atlas_volume=None,
use_flat=False,
@@ -194,7 +193,6 @@ def folder_to_atlas_space(
centroids_list,
region_areas_list,
index,
method,
object_cutoff,
atlas_volume,
use_flat,
@@ -237,9 +235,7 @@ def folder_to_atlas_space(

def load_segmentation(segmentation_path: str):
"""Load a segmentation from a file."""
print(f"working on {segmentation_path}")
if segmentation_path.endswith(".dzip"):
print("Reconstructing dzi")
return reconstruct_dzi(segmentation_path)
else:
return cv2.imread(segmentation_path)
@@ -282,7 +278,6 @@ def get_region_areas(
def get_transformed_coordinates(
non_linear,
slice_dict,
method,
scaled_x,
scaled_y,
centroids,
@@ -292,17 +287,15 @@
):
new_x, new_y, centroids_new_x, centroids_new_y = None, None, None, None
if non_linear and "markers" in slice_dict:
if method in ["per_pixel", "all"] and scaled_x is not None:
if scaled_x is not None:
new_x, new_y = transform_vec(triangulation, scaled_x, scaled_y)
if method in ["per_object", "all"] and centroids is not None:
if centroids is not None:
centroids_new_x, centroids_new_y = transform_vec(
triangulation, scaled_centroidsX, scaled_centroidsY
)
else:
if method in ["per_pixel", "all"]:
new_x, new_y = scaled_x, scaled_y
if method in ["per_object", "all"]:
centroids_new_x, centroids_new_y = scaled_centroidsX, scaled_centroidsY
new_x, new_y = scaled_x, scaled_y
centroids_new_x, centroids_new_y = scaled_centroidsX, scaled_centroidsY
return new_x, new_y, centroids_new_x, centroids_new_y


@@ -317,7 +310,6 @@ def segmentation_to_atlas_space(
centroids_list=None,
region_areas_list=None,
index=None,
method="per_pixel",
object_cutoff=0,
atlas_volume=None,
use_flat=False,
@@ -346,29 +338,26 @@
)
centroids, points = None, None
scaled_centroidsX, scaled_centroidsY, scaled_x, scaled_y = None, None, None, None
if method in ["per_object", "all"]:
centroids, scaled_centroidsX, scaled_centroidsY = get_centroids(
segmentation, pixel_id, y_scale, x_scale, object_cutoff
)
if method in ["per_pixel", "all"]:
scaled_y, scaled_x = get_scaled_pixels(segmentation, pixel_id, y_scale, x_scale)
centroids, scaled_centroidsX, scaled_centroidsY = get_centroids(
segmentation, pixel_id, y_scale, x_scale, object_cutoff
)
scaled_y, scaled_x = get_scaled_pixels(segmentation, pixel_id, y_scale, x_scale)

new_x, new_y, centroids_new_x, centroids_new_y = get_transformed_coordinates(
non_linear,
slice_dict,
method,
scaled_x,
scaled_y,
centroids,
scaled_centroidsX,
scaled_centroidsY,
triangulation,
)
if method in ["per_pixel", "all"] and new_x is not None:
if new_x is not None:
points = transform_to_atlas_space(
slice_dict["anchoring"], new_y, new_x, reg_height, reg_width
)
if method in ["per_object", "all"] and centroids_new_x is not None:
if centroids_new_x is not None:
centroids = transform_to_atlas_space(
slice_dict["anchoring"],
centroids_new_y,
@@ -387,9 +376,6 @@ def get_centroids(segmentation, pixel_id, y_scale, x_scale, object_cutoff=0):
centroids, area, coords = get_centroids_and_area(
binary_seg, pixel_cut_off=object_cutoff
)

print(f"using pixel id {pixel_id}")
print(f"Found {len(centroids)} objects in the segmentation")
if len(centroids) == 0:
return None, None, None
centroidsX = centroids[:, 1]
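For reference, the simplified get_transformed_coordinates reads roughly as follows after this change. This is a sketch reconstructed from the hunks above rather than a verbatim copy of the file: indentation is restored by hand and transform_vec is assumed to keep its existing signature.

def get_transformed_coordinates(
    non_linear,
    slice_dict,
    scaled_x,
    scaled_y,
    centroids,
    scaled_centroidsX,
    scaled_centroidsY,
    triangulation,
):
    # No method switch any more: both the per-pixel and per-object paths
    # run whenever their inputs exist.
    new_x, new_y, centroids_new_x, centroids_new_y = None, None, None, None
    if non_linear and "markers" in slice_dict:
        if scaled_x is not None:
            new_x, new_y = transform_vec(triangulation, scaled_x, scaled_y)
        if centroids is not None:
            centroids_new_x, centroids_new_y = transform_vec(
                triangulation, scaled_centroidsX, scaled_centroidsY
            )
    else:
        new_x, new_y = scaled_x, scaled_y
        centroids_new_x, centroids_new_y = scaled_centroidsX, scaled_centroidsY
    return new_x, new_y, centroids_new_x, centroids_new_y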
3 changes: 1 addition & 2 deletions PyNutil/counting_and_load.py
@@ -230,8 +230,7 @@ def flat_to_dataframe(
image = read_flat_file(file)
elif file.endswith(".seg"):
image = read_seg_file(file)
print("datatype", image.dtype)
print("image shape open", image.shape)


if rescaleXY:
image_shapeY, image_shapeX = image.shape[0], image.shape[1]
33 changes: 9 additions & 24 deletions PyNutil/main.py
@@ -45,7 +45,7 @@ def __init__(self, segmentation_folder=None, alignment_json=None, colour=None,
def _check_atlas_name(self):
if not self.atlas_name:
raise ValueError("Atlas name must be specified")

def _load_settings(self, settings_file):
if settings_file:
with open(settings_file, "r") as f:
@@ -58,7 +58,6 @@ def _load_settings(self, settings_file):

def load_atlas_data(self, atlas_name):
"""Loads the atlas volume and labels from disk."""
print("loading atlas volume")
atlas = brainglobe_atlasapi.BrainGlobeAtlas(atlas_name=atlas_name)
atlas_structures = {
"idx": [i["id"] for i in atlas.structures_list],
@@ -79,28 +78,23 @@ def load_atlas_data(self, atlas_name):
return atlas_volume, atlas_labels

def _process_atlas_volume(self, atlas):
if "allen_mouse_" in self.atlas_name:
print("reorienting allen atlas into quicknii space...")
return np.transpose(atlas.annotation, [2, 0, 1])[:, ::-1, ::-1]
else:
return atlas.annotation
print("reorienting brainglobe atlas into quicknii space...")
return np.transpose(atlas.annotation, [2, 0, 1])[:, ::-1, ::-1]


def load_custom_atlas(self, atlas_path, label_path):
atlas_volume = read_atlas_volume(atlas_path)
atlas_labels = pd.read_csv(label_path)
return atlas_volume, atlas_labels

def get_coordinates(self, non_linear=True, method="all", object_cutoff=0, use_flat=False):
def get_coordinates(self, non_linear=True, object_cutoff=0, use_flat=False):
"""Extracts pixel coordinates from the segmentation data."""
self._validate_method(method)
print("extracting coordinates with method:", method)
pixel_points, centroids, region_areas_list, points_len, centroids_len, segmentation_filenames = folder_to_atlas_space(
self.segmentation_folder,
self.alignment_json,
self.atlas_labels,
pixel_id=self.colour,
non_linear=non_linear,
method=method,
object_cutoff=object_cutoff,
atlas_volume=self.atlas_volume,
use_flat=use_flat,
@@ -111,19 +105,14 @@ def get_coordinates(self, non_linear=True, object_cutoff=0, use_flat=False):
self.centroids_len = centroids_len
self.segmentation_filenames = segmentation_filenames
self.region_areas_list = region_areas_list
self.method = method

def _validate_method(self, method):
valid_methods = ["per_pixel", "per_object", "all"]
if method not in valid_methods:
raise ValueError(f"method {method} not recognised, valid methods are: {', '.join(valid_methods)}")

def quantify_coordinates(self):
"""Quantifies the pixel coordinates by region."""
self._check_coordinates_extracted()
print("quantifying coordinates")
labeled_points_centroids = self._label_points(self.centroids) if self.method in ["per_object", "all"] else None
labeled_points = self._label_points(self.pixel_points) if self.method in ["per_pixel", "all"] else None
labeled_points_centroids = self._label_points(self.centroids)
labeled_points = self._label_points(self.pixel_points)

self._quantify_per_section(labeled_points, labeled_points_centroids)
self._combine_slice_reports()
@@ -146,8 +135,8 @@ def _quantify_per_section(self, labeled_points, labeled_points_centroids):
per_section_df = []

for pl, cl, ra in zip(self.points_len, self.centroids_len, self.region_areas_list):
current_centroids = labeled_points_centroids[prev_cl : prev_cl + cl] if self.method in ["per_object", "all"] else None
current_points = labeled_points[prev_pl : prev_pl + pl] if self.method in ["per_pixel", "all"] else None
current_centroids = labeled_points_centroids[prev_cl : prev_cl + cl]
current_points = labeled_points[prev_pl : prev_pl + pl]
current_df = pixel_count_per_region(current_points, current_centroids, self.atlas_labels)
current_df_new = self._merge_dataframes(current_df, ra)
per_section_df.append(current_df_new)
@@ -213,13 +202,9 @@ def _save_per_section_reports(self, output_folder):
prev_pl += pl

def _save_per_section_meshview(self, output_folder, split_fn, pl, cl, prev_pl, prev_cl):
if self.method in ["per_pixel", "all"]:
write_points_to_meshview(self.pixel_points[prev_pl : pl + prev_pl], self.labeled_points[prev_pl : pl + prev_pl], f"{output_folder}/per_section_meshview/{split_fn}_pixels.json", self.atlas_labels)
if self.method in ["per_object", "all"]:
write_points_to_meshview(self.centroids[prev_cl : cl + prev_cl], self.labeled_points_centroids[prev_cl : cl + prev_cl], f"{output_folder}/per_section_meshview/{split_fn}_centroids.json", self.atlas_labels)

def _save_whole_series_meshview(self, output_folder):
if self.method in ["per_pixel", "all"]:
write_points_to_meshview(self.pixel_points, self.labeled_points, f"{output_folder}/whole_series_meshview/pixels_meshview.json", self.atlas_labels)
if self.method in ["per_object", "all"]:
write_points_to_meshview(self.centroids, self.labeled_points_centroids, f"{output_folder}/whole_series_meshview/objects_meshview.json", self.atlas_labels)
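Taken together, the main.py changes drop the per_pixel/per_object/all switch from the public workflow. A minimal usage sketch of the resulting API, based only on the signatures visible in this diff (the import path, constructor arguments, and atlas name are illustrative assumptions, not taken from the repository):

from PyNutil import PyNutil  # assumed import path

pnt = PyNutil(
    segmentation_folder="my_segmentations/",  # illustrative inputs
    alignment_json="my_alignment.json",
    colour=[0, 0, 0],
    atlas_name="allen_mouse_25um",
)
pnt.get_coordinates(non_linear=True, object_cutoff=0, use_flat=False)  # no method= argument
pnt.quantify_coordinates()  # now always labels both pixels and object centroids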
1 change: 0 additions & 1 deletion tests/test_quantification.py
@@ -34,7 +34,6 @@ def run_test_case(self, test_case_filename):
np.testing.assert_array_almost_equal(
pnt.label_df["region_area"].values, expected_region_area["region_area"].values
)

test_case_files = [
"brainglobe_atlas.json"
]