| Metric | Value |
| --- | --- |
| Conditions | 9 |
| Total Lines | 65 |
| Code Lines | 48 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include:
If many parameters/temporary variables are present:
```python
# Copyright 2014 Diamond Light Source Ltd.

    def __get_backing_file(self, data_obj, file_name):
        fname = '%s/%s.h5' % \
            (self.exp.get('out_path'), file_name)

        if os.path.exists(fname):
            return h5py.File(fname, 'r')

        self.hdf5 = Hdf5Utils(self.exp)

        dims_temp = self.parameters['proj_data_dims'].copy()
        proj_data_dims = tuple(dims_temp)

        if (file_name == 'phantom'):
            dims_temp[0]=dims_temp[1]
            dims_temp[2]=dims_temp[1]
            proj_data_dims = tuple(dims_temp)

        patterns = data_obj.get_data_patterns()
        p_name = list(patterns.keys())[0]
        p_dict = patterns[p_name]
        p_dict['max_frames_transfer'] = 1
        nnext = {p_name: p_dict}

        pattern_idx = {'current': nnext, 'next': nnext}
        chunking = Chunking(self.exp, pattern_idx)
        chunks = chunking._calculate_chunking(proj_data_dims, np.int16)

        h5file = self.hdf5._open_backing_h5(fname, 'w')
        dset = h5file.create_dataset('test', proj_data_dims, chunks=chunks)

        self.exp._barrier()

        slice_dirs = list(nnext.values())[0]['slice_dims']
        nDims = len(dset.shape)
        total_frames = np.prod([dset.shape[i] for i in slice_dirs])
        sub_size = \
            [1 if i in slice_dirs else dset.shape[i] for i in range(nDims)]

        # need an mpi barrier after creating the file before populating it
        idx = 0
        sl, total_frames = \
            self.__get_start_slice_list(slice_dirs, dset.shape, total_frames)
        # calculate the first slice
        for i in range(total_frames):
            if (file_name == 'synth_proj_data'):
                #generate projection data
                gen_data = TomoP3D.ModelSinoSub(self.tomo_model, proj_data_dims[1], proj_data_dims[2], proj_data_dims[1], (i, i+1), -self.angles, self.path_library3D)
            else:
                #generate phantom data
                gen_data = TomoP3D.ModelSub(self.tomo_model, proj_data_dims[1], (i, i+1), self.path_library3D)
            dset[tuple(sl)] = np.swapaxes(gen_data,0,1)
            if sl[slice_dirs[idx]].stop == dset.shape[slice_dirs[idx]]:
                idx += 1
                if idx == len(slice_dirs):
                    break
                tmp = sl[slice_dirs[idx]]
                sl[slice_dirs[idx]] = slice(tmp.start+1, tmp.stop+1)

        self.exp._barrier()

        try:
            h5file.close()
        except IOError as exc:
            logging.debug('There was a problem trying to close the file in random_hdf5_loader')

        return self.hdf5._open_backing_h5(fname, 'r')
```