enable loading of legacy tissue contours saved as pickle
afrendeiro committed Jul 4, 2024
1 parent 71961a7 commit 262616e
Showing 1 changed file: wsi/wsi.py (55 additions, 19 deletions).
@@ -109,6 +109,40 @@ def __init__(
def __repr__(self):
return f"WholeSlideImage('{self.path}')"

def _assert_level_downsamples(self):
level_downsamples = []
dim_0 = self.wsi.level_dimensions[0]

for downsample, dim in zip(self.wsi.level_downsamples, self.wsi.level_dimensions):
estimated_downsample = (dim_0[0] / float(dim[0]), dim_0[1] / float(dim[1]))
if estimated_downsample != (downsample, downsample):
    level_downsamples.append(estimated_downsample)
else:
    level_downsamples.append((downsample, downsample))

return level_downsamples
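
For context, the check above prefers the downsample factor estimated from the level dimensions whenever it disagrees with the factor reported by the slide backend. A minimal standalone sketch of that logic, with made-up pyramid dimensions:

    # Hypothetical three-level pyramid; dimensions are invented for illustration.
    level_dimensions = [(10000, 8000), (2500, 2000), (625, 500)]
    reported_downsamples = [1.0, 4.0, 16.0]

    level_downsamples = []
    dim_0 = level_dimensions[0]
    for downsample, dim in zip(reported_downsamples, level_dimensions):
        # Per-axis downsample implied by the level dimensions.
        estimated = (dim_0[0] / float(dim[0]), dim_0[1] / float(dim[1]))
        if estimated != (downsample, downsample):
            level_downsamples.append(estimated)
        else:
            level_downsamples.append((downsample, downsample))

    print(level_downsamples)  # [(1.0, 1.0), (4.0, 4.0), (16.0, 16.0)]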

def _load_segmentation_legacy(self, pickle_file: Path | None = None) -> None:
import warnings
import pickle

warnings.warn(
"Loading segmentation results from a pickle file is deprecated. "
"Save segmentation results to an HDF5 file instead.",
)

if pickle_file is None:
pickle_file = self.path.with_suffix(".segmentation.pickle")

with pickle_file.open("rb") as handle:
    data = pickle.load(handle)
self.contours_tissue = data["tissue"]
self.holes_tissue = data["holes"]
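
Judging from the keys read above, the legacy format is a pickle of a dict with "tissue" and "holes" entries. A sketch of how such a file could be produced, e.g. for a regression test; the contour shapes are an assumption (OpenCV-style (N, 1, 2) point arrays):

    import pickle
    from pathlib import Path

    import numpy as np

    # Placeholder contours; the (N, 1, 2) shape mirrors cv2.findContours output.
    tissue = [np.zeros((4, 1, 2), dtype=np.int32)]
    # Assumed structure: one list of hole contours per tissue contour.
    holes = [[np.zeros((3, 1, 2), dtype=np.int32)]]

    with Path("slide.segmentation.pickle").open("wb") as handle:
        pickle.dump({"tissue": tissue, "holes": holes}, handle)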

def load_segmentation(self, hdf5_file: Path | None = None) -> None:
"""
Load slide segmentation results from an HDF5 file, falling back to a legacy pickle file if present.
@@ -126,7 +160,27 @@ def load_segmentation(self, hdf5_file: Path | None = None) -> None:
if hdf5_file is None:
hdf5_file = self.hdf5_file

legacy_file = self.path.with_suffix(".segmentation.pickle")
if not hdf5_file.exists():
if legacy_file.exists():
self._load_segmentation_legacy(legacy_file)
return

req = [
"contours_tissue_breakpoints",
"contours_tissue",
"holes_tissue_breakpoints",
"holes_tissue",
]

with h5py.File(hdf5_file, "r") as f:
for r in req:
if r not in f:
print(f"H5 file {hdf5_file} did not have the required keys!")
if legacy_file.exists():
self._load_segmentation_legacy(legacy_file)
return
raise ValueError(f"Required dataset {r} not found in {hdf5_file}")
bpt = f["contours_tissue_breakpoints"][()]
ct = f["contours_tissue"][()]
self.contours_tissue = [
@@ -160,7 +214,7 @@ def save_segmentation(self, hdf5_file: Path | None = None, mode: str = "a") -> None:
"""
if hdf5_file is None:
hdf5_file = self.hdf5_file
with h5py.File(self.hdf5_file, mode) as f:
with h5py.File(hdf5_file, mode) as f:
data = np.concatenate(self.contours_tissue)
f.create_dataset("contours_tissue", data=data)
bpt = [0] + np.cumsum([c.shape[0] for c in self.contours_tissue]).tolist()
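
The scheme above stores all tissue contours as one concatenated array plus cumulative breakpoints, which is presumably what the elided reconstruction in load_segmentation inverts. A self-contained round-trip sketch of that layout:

    import numpy as np

    contours = [np.arange(8).reshape(4, 1, 2), np.arange(6).reshape(3, 1, 2)]

    # Save side: flatten to one array and record cumulative row counts.
    flat = np.concatenate(contours)
    bpt = [0] + np.cumsum([c.shape[0] for c in contours]).tolist()  # [0, 4, 7]

    # Load side: slice between consecutive breakpoints to rebuild the list.
    rebuilt = [flat[a:b] for a, b in zip(bpt[:-1], bpt[1:])]
    assert all((a == b).all() for a, b in zip(contours, rebuilt))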
@@ -467,24 +521,6 @@ def _scale_holes_dim(contours, scale):
for holes in contours
]

def _assert_level_downsamples(self):
level_downsamples = []
dim_0 = self.wsi.level_dimensions[0]

for downsample, dim in zip(self.wsi.level_downsamples, self.wsi.level_dimensions):
estimated_downsample = (dim_0[0] / float(dim[0]), dim_0[1] / float(dim[1]))
if estimated_downsample != (downsample, downsample):
    level_downsamples.append(estimated_downsample)
else:
    level_downsamples.append((downsample, downsample))

return level_downsamples

def _process_contours(
self,
save_path: tp.Optional[Path] = None,
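
From a caller's perspective the new fallback is transparent. A usage sketch, assuming the constructor takes the slide path and that only the old pickle sits next to the slide (file names are hypothetical):

    from pathlib import Path

    from wsi.wsi import WholeSlideImage  # module path taken from the diff

    slide = WholeSlideImage(Path("slide.svs"))
    # With no HDF5 segmentation on disk but slide.segmentation.pickle present,
    # load_segmentation() now falls back to the legacy pickle and emits a
    # deprecation warning instead of failing.
    slide.load_segmentation()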
