| @@ 75-91 (lines=17) @@ | ||
| 72 | ) |
| 73 | return cropped_sample |
| 74 | ||
| 75 | @staticmethod |
| 76 | def copy_and_crop( |
| 77 | sample: Subject, |
| 78 | index_ini: np.ndarray, |
| 79 | index_fin: np.ndarray, |
| 80 | ) -> dict: |
| 81 | cropped_sample = {} |
| 82 | iterable = sample.get_images_dict(intensity_only=False).items() |
| 83 | for image_name, image in iterable: |
| 84 | cropped_sample[image_name] = copy.deepcopy(image) |
| 85 | sample_image_dict = image |
| 86 | cropped_image_dict = cropped_sample[image_name] |
| 87 | cropped_image_dict[DATA] = crop( |
| 88 | sample_image_dict[DATA], index_ini, index_fin) |
| 89 | # torch doesn't like uint16 |
| 90 | cropped_sample['index_ini'] = index_ini.astype(int) |
| 91 | return cropped_sample |
| 92 | ||
| 93 | @staticmethod |
| 94 | def _grid_spatial_coordinates( |
| @@ 218-230 (lines=13) @@ | ||
| 215 | center = np.array(center).astype(int) |
| 216 | return center |
| 217 | ||
| 218 | def copy_and_crop(self, sample, index_ini: np.ndarray) -> dict: |
| 219 | index_fin = index_ini + self.patch_size |
| 220 | cropped_sample = copy.deepcopy(sample) |
| 221 | iterable = sample.get_images_dict(intensity_only=False).items() |
| 222 | for image_name, image in iterable: |
| 223 | cropped_sample[image_name] = copy.deepcopy(image) |
| 224 | sample_image_dict = image |
| 225 | cropped_image_dict = cropped_sample[image_name] |
| 226 | cropped_image_dict[DATA] = self.crop( |
| 227 | sample_image_dict[DATA], index_ini, index_fin) |
| 228 | # torch doesn't like uint16 |
| 229 | cropped_sample['index_ini'] = index_ini.astype(int) |
| 230 | return cropped_sample |
| 231 | ||
| 232 | @staticmethod |
| 233 | def crop( |
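
Both reported fragments repeat the same loop: deep-copy each image returned by `sample.get_images_dict(intensity_only=False)`, replace its `DATA` entry with the patch cropped between `index_ini` and `index_fin`, and record `index_ini` as a plain `int` array. A minimal sketch of a shared helper that both `copy_and_crop` variants could delegate to is shown below; the helper name `copy_and_crop_images`, the `crop_fn` parameter, and the `DATA = 'data'` constant are assumptions for illustration, not part of the reported code.

```python
# Hypothetical sketch of a shared copy-and-crop helper; names and the DATA key
# are assumptions, not part of the code excerpted in the report above.
import copy
from typing import Callable, Dict, Iterable, Tuple

import numpy as np

DATA = 'data'  # assumed dictionary key for the image tensor


def copy_and_crop_images(
        images: Iterable[Tuple[str, dict]],
        crop_fn: Callable[..., np.ndarray],
        index_ini: np.ndarray,
        index_fin: np.ndarray,
) -> Dict[str, dict]:
    """Deep-copy every image and replace its DATA entry with the cropped patch."""
    cropped_sample: Dict[str, dict] = {}
    for image_name, image in images:
        cropped_sample[image_name] = copy.deepcopy(image)
        cropped_sample[image_name][DATA] = crop_fn(image[DATA], index_ini, index_fin)
    # torch doesn't like uint16, so store the patch origin as a plain int array
    cropped_sample['index_ini'] = index_ini.astype(int)
    return cropped_sample
```

Each duplicated block could then reduce to a single call such as `copy_and_crop_images(sample.get_images_dict(intensity_only=False).items(), self.crop, index_ini, index_ini + self.patch_size)`, which is what makes the clone pair above straightforward to remove.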
|