Passed
Pull Request: master (#182) by Fernando, 58s

GridSampler.get_patches_locations()    Grade: A

Complexity
    Conditions: 3

Size
    Total Lines: 20
    Code Lines: 19

Duplication
    Lines: 0
    Ratio: 0%

Importance
    Changes: 0

Metric  Value
cc      3
eloc    19
nop     3
dl      0
loc     20
rs      9.45
c       0
b       0
f       0
import numpy as np
from torch.utils.data import Dataset

from ..sampler.sampler import PatchSampler
from ...utils import to_tuple
from ...torchio import LOCATION, TypeTuple, TypeTripletInt
from ..subject import Subject


class GridSampler(PatchSampler, Dataset):
    r"""Extract patches across a whole volume.

    Grid samplers are useful for performing inference using all patches from a
    volume. They are often used with a :py:class:`~torchio.data.GridAggregator`.

    Args:
        sample: Instance of :py:class:`~torchio.data.subject.Subject`
            from which patches will be extracted.
        patch_size: Tuple of integers :math:`(d, h, w)` to generate patches
            of size :math:`d \times h \times w`.
            If a single number :math:`n` is provided,
            :math:`d = h = w = n`.
        patch_overlap: Tuple of even integers :math:`(d_o, h_o, w_o)` specifying
            the overlap between patches for dense inference. If a single number
            :math:`n` is provided, :math:`d_o = h_o = w_o = n`.

    .. note:: Adapted from NiftyNet. See `this NiftyNet tutorial
        <https://niftynet.readthedocs.io/en/dev/window_sizes.html>`_ for more
        information.
    """
    def __init__(
            self,
            sample: Subject,
            patch_size: TypeTuple,
            patch_overlap: TypeTuple,
            ):
        self.sample = sample
        PatchSampler.__init__(self, patch_size)
        self.patch_overlap = to_tuple(patch_overlap, length=3)
        sizes = self.sample.spatial_shape, self.patch_size, self.patch_overlap
        self.parse_sizes(*sizes)
        self.locations = self.get_patches_locations(*sizes)

    def __len__(self):
        return len(self.locations)

    def __getitem__(self, index):
        # Assume 3D
        location = self.locations[index]
        index_ini = location[:3]
        index_fin = location[3:]
        cropped_sample = self.extract_patch(self.sample, index_ini, index_fin)
        cropped_sample[LOCATION] = location
        return cropped_sample

    @staticmethod
    def parse_sizes(
            image_size: TypeTripletInt,
            patch_size: TypeTripletInt,
            patch_overlap: TypeTripletInt,
            ) -> None:
        image_size = np.array(image_size)
        patch_size = np.array(patch_size)
        patch_overlap = np.array(patch_overlap)
        if np.any(patch_size > image_size):
            message = (
                f'Patch size {tuple(patch_size)} cannot be'
                f' larger than image size {tuple(image_size)}'
            )
            raise ValueError(message)
        if np.any(patch_overlap >= patch_size):
            message = (
                f'Patch overlap {tuple(patch_overlap)} must be smaller'
                f' than patch size {tuple(patch_size)}'
            )
            raise ValueError(message)
        if np.any(patch_overlap % 2):
            message = (
                'Patch overlap must be a tuple of even integers,'
                f' not {tuple(patch_overlap)}'
            )
            raise ValueError(message)

    def extract_patch(
            self,
            sample: Subject,
            index_ini: TypeTripletInt,
            index_fin: TypeTripletInt,
            ) -> Subject:
        crop = self.get_crop_transform(
            sample.spatial_shape,
            index_ini,
            index_fin - index_ini,
        )
        cropped_sample = crop(sample)
        return cropped_sample

    @staticmethod
    def get_patches_locations(
            image_size: TypeTripletInt,
            patch_size: TypeTripletInt,
            patch_overlap: TypeTripletInt,
            ) -> np.ndarray:
        indices = []
        zipped = zip(image_size, patch_size, patch_overlap)
        for im_size_dim, patch_size_dim, patch_overlap_dim in zipped:
            end = im_size_dim + 1 - patch_size_dim
            step = patch_size_dim - patch_overlap_dim
            indices_dim = list(range(0, end, step))
            if im_size_dim % step:
                indices_dim.append(im_size_dim - patch_size_dim)
            indices.append(indices_dim)
        indices_ini = np.array(np.meshgrid(*indices)).reshape(3, -1).T
        indices_ini = np.unique(indices_ini, axis=0)
        indices_fin = indices_ini + np.array(patch_size)
        locations = np.hstack((indices_ini, indices_fin))
        return np.array(sorted(locations.tolist()))
118