Passed
Pull Request — master (#182)
by Fernando
57s

torchio.data.inference.grid_sampler   A

Complexity

Total Complexity 12

Size/Duplication

Total Lines 134
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
eloc 88
dl 0
loc 134
rs 10
c 0
b 0
f 0
wmc 12

6 Methods

Rating   Name   Duplication   Size   Complexity  
A GridSampler.__init__() 0 20 2
A GridSampler.parse_sizes() 0 27 4
A GridSampler.__getitem__() 0 8 1
A GridSampler.get_patches_locations() 0 20 3
A GridSampler.extract_patch() 0 13 1
A GridSampler.__len__() 0 2 1
from typing import Union

import numpy as np
from torch.utils.data import Dataset

from ...utils import to_tuple
from ...torchio import LOCATION, TypeTuple, TypeTripletInt
from ..subject import Subject
from ..sampler.sampler import PatchSampler


class GridSampler(PatchSampler, Dataset):
    r"""Extract patches across a whole volume.

    Grid samplers are useful to perform inference using all patches from a
    volume. They are often used with a :py:class:`~torchio.data.GridAggregator`.

    Args:
        sample: Instance of :py:class:`~torchio.data.subject.Subject`
            from which patches will be extracted.
        patch_size: Tuple of integers :math:`(d, h, w)` to generate patches
            of size :math:`d \times h \times w`.
            If a single number :math:`n` is provided,
            :math:`d = h = w = n`.
        patch_overlap: Tuple of even integers :math:`(d_o, h_o, w_o)` specifying
            the overlap between patches for dense inference. If a single number
            :math:`n` is provided, :math:`d_o = h_o = w_o = n`.
        padding_mode: Same as :attr:`padding_mode` in
            :py:class:`~torchio.transforms.Pad`. If ``None``, the volume will
            not be padded before sampling and patches at the border will not be
            cropped by the aggregator.

    .. note:: Adapted from NiftyNet. See `this NiftyNet tutorial
        <https://niftynet.readthedocs.io/en/dev/window_sizes.html>`_ for more
        information about patch-based sampling. Note that
        :py:attr:`patch_overlap` is twice the :py:attr:`border` used in the
        NiftyNet tutorial.
    """
    def __init__(
            self,
            sample: Subject,
            patch_size: TypeTuple,
            patch_overlap: TypeTuple = (0, 0, 0),
            padding_mode: Union[str, float, None] = None,
            ):
        self.sample = sample
        self.patch_overlap = np.array(to_tuple(patch_overlap, length=3))
        self.padding_mode = padding_mode
        if padding_mode is not None:
            from ...transforms import Pad
            border = self.patch_overlap // 2
            padding = border.repeat(2)
            pad = Pad(padding, padding_mode=padding_mode)
            self.sample = pad(self.sample)
        PatchSampler.__init__(self, patch_size)
        sizes = self.sample.spatial_shape, self.patch_size, self.patch_overlap
        self.parse_sizes(*sizes)
        self.locations = self.get_patches_locations(*sizes)

    def __len__(self):
        return len(self.locations)

    def __getitem__(self, index):
        # Assume 3D
        location = self.locations[index]
        index_ini = location[:3]
        index_fin = location[3:]
        cropped_sample = self.extract_patch(self.sample, index_ini, index_fin)
        cropped_sample[LOCATION] = location
        return cropped_sample

    @staticmethod
    def parse_sizes(
            image_size: TypeTripletInt,
            patch_size: TypeTripletInt,
            patch_overlap: TypeTripletInt,
            ) -> None:
        image_size = np.array(image_size)
        patch_size = np.array(patch_size)
        patch_overlap = np.array(patch_overlap)
        if np.any(patch_size > image_size):
            message = (
                f'Patch size {tuple(patch_size)} cannot be'
                f' larger than image size {tuple(image_size)}'
            )
            raise ValueError(message)
        if np.any(patch_overlap >= patch_size):
            message = (
                f'Patch overlap {tuple(patch_overlap)} must be smaller'
                f' than patch size {tuple(patch_size)}'
            )
            raise ValueError(message)
        if np.any(patch_overlap % 2):
            message = (
                'Patch overlap must be a tuple of even integers,'
                f' not {tuple(patch_overlap)}'
            )
            raise ValueError(message)

    def extract_patch(
            self,
            sample: Subject,
            index_ini: TypeTripletInt,
            index_fin: TypeTripletInt,
            ) -> Subject:
        crop = self.get_crop_transform(
            sample.spatial_shape,
            index_ini,
            index_fin - index_ini,
        )
        cropped_sample = crop(sample)
        return cropped_sample

    @staticmethod
    def get_patches_locations(
            image_size: TypeTripletInt,
            patch_size: TypeTripletInt,
            patch_overlap: TypeTripletInt,
            ) -> np.ndarray:
        indices = []
        zipped = zip(image_size, patch_size, patch_overlap)
        for im_size_dim, patch_size_dim, patch_overlap_dim in zipped:
            end = im_size_dim + 1 - patch_size_dim
            step = patch_size_dim - patch_overlap_dim
            indices_dim = list(range(0, end, step))
            if im_size_dim % step:
                indices_dim.append(im_size_dim - patch_size_dim)
            indices.append(indices_dim)
        indices_ini = np.array(np.meshgrid(*indices)).reshape(3, -1).T
        indices_ini = np.unique(indices_ini, axis=0)
        indices_fin = indices_ini + np.array(patch_size)
        locations = np.hstack((indices_ini, indices_fin))
        return np.array(sorted(locations.tolist()))
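
To make the grid layout concrete, here is a small worked sketch of get_patches_locations() using the class shown above. The numbers are chosen purely for illustration and are not taken from the pull request.

# Worked example (illustrative only): a 10x10x10 volume sampled with
# 4x4x4 patches and an overlap of 2 voxels per axis. The step between
# patch origins is 4 - 2 = 2, so the origins along each axis are
# [0, 2, 4, 6] and the grid contains 4**3 = 64 patches.
locations = GridSampler.get_patches_locations(  # GridSampler as defined above
    (10, 10, 10),  # image_size
    (4, 4, 4),     # patch_size
    (2, 2, 2),     # patch_overlap
)
print(locations.shape)  # (64, 6)
print(locations[0])     # [0 0 0 4 4 4]: start indices, then (exclusive) end indices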
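
For context, the following is a minimal sketch of the dense-inference loop this sampler is designed for, pairing it with the GridAggregator mentioned in the docstring. It is not part of the pull request: the subject and model objects are hypothetical placeholders, the 'image' key is an assumed image name, and the torchio.inference import path and the GridAggregator constructor are assumptions based on the torchio documentation that may differ between versions.

import torch
from torch.utils.data import DataLoader

import torchio

# Hypothetical inputs (placeholders, not from the PR): a preprocessed
# torchio Subject containing an image called 'image', and a trained 3D model.
subject = ...  # torchio.Subject instance to run inference on
model = ...    # e.g. a torch.nn.Module producing one channel per class

# GridSampler is a torch Dataset, so a plain DataLoader iterates its patches.
grid_sampler = torchio.inference.GridSampler(
    subject,          # `sample` argument of this class
    patch_size=64,    # cubic 64 x 64 x 64 patches
    patch_overlap=4,  # must be even; see parse_sizes() above
)
patch_loader = DataLoader(grid_sampler, batch_size=4)

# GridAggregator stitches per-patch predictions back into a full volume
# (assumed API: constructed from the sampler in this torchio version).
aggregator = torchio.inference.GridAggregator(grid_sampler)

with torch.no_grad():
    for batch in patch_loader:
        inputs = batch['image'][torchio.DATA]  # assumed image key
        locations = batch[torchio.LOCATION]    # attached by __getitem__()
        aggregator.add_batch(model(inputs), locations)

output_tensor = aggregator.get_output_tensor()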