| Conditions | 9 |
| Total Lines | 65 |
| Code Lines | 48 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |
Small methods make your code easier to understand, particularly when combined with a good name. Moreover, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include:
If many parameters/temporary variables are present:
| 1 | # Copyright 2014 Diamond Light Source Ltd. |
||
| 78 | def __get_backing_file(self, data_obj, file_name): |
||
| 79 | fname = '%s/%s.h5' % \ |
||
| 80 | (self.exp.get('out_path'), file_name) |
||
| 81 | |||
| 82 | if os.path.exists(fname): |
||
| 83 | return h5py.File(fname, 'r') |
||
| 84 | |||
| 85 | self.hdf5 = Hdf5Utils(self.exp) |
||
| 86 | |||
| 87 | dims_temp = self.parameters['proj_data_dims'].copy() |
||
| 88 | proj_data_dims = tuple(dims_temp) |
||
|
|
|||
| 89 | if (file_name == 'phantom'): |
||
| 90 | dims_temp[0]=dims_temp[1] |
||
| 91 | dims_temp[2]=dims_temp[1] |
||
| 92 | proj_data_dims = tuple(dims_temp) |
||
| 93 | |||
| 94 | patterns = data_obj.get_data_patterns() |
||
| 95 | p_name = list(patterns.keys())[0] |
||
| 96 | p_dict = patterns[p_name] |
||
| 97 | p_dict['max_frames_transfer'] = 1 |
||
| 98 | nnext = {p_name: p_dict} |
||
| 99 | |||
| 100 | pattern_idx = {'current': nnext, 'next': nnext} |
||
| 101 | chunking = Chunking(self.exp, pattern_idx) |
||
| 102 | chunks = chunking._calculate_chunking(proj_data_dims, np.int16) |
||
| 103 | |||
| 104 | h5file = self.hdf5._open_backing_h5(fname, 'w') |
||
| 105 | dset = h5file.create_dataset('test', proj_data_dims, chunks=chunks) |
||
| 106 | |||
| 107 | self.exp._barrier() |
||
| 108 | |||
| 109 | slice_dirs = list(nnext.values())[0]['slice_dims'] |
||
| 110 | nDims = len(dset.shape) |
||
| 111 | total_frames = np.prod([dset.shape[i] for i in slice_dirs]) |
||
| 112 | sub_size = \ |
||
| 113 | [1 if i in slice_dirs else dset.shape[i] for i in range(nDims)] |
||
| 114 | |||
| 115 | # need an mpi barrier after creating the file before populating it |
||
| 116 | idx = 0 |
||
| 117 | sl, total_frames = \ |
||
| 118 | self.__get_start_slice_list(slice_dirs, dset.shape, total_frames) |
||
| 119 | # calculate the first slice |
||
| 120 | for i in range(total_frames): |
||
| 121 | if (file_name == 'synth_proj_data'): |
||
| 122 | #generate projection data |
||
| 123 | gen_data = TomoP3D.ModelSinoSub(self.tomo_model, proj_data_dims[1], proj_data_dims[2], proj_data_dims[1], (i, i+1), -self.angles, self.path_library3D) |
||
| 124 | else: |
||
| 125 | #generate phantom data |
||
| 126 | gen_data = TomoP3D.ModelSub(self.tomo_model, proj_data_dims[1], (i, i+1), self.path_library3D) |
||
| 127 | dset[tuple(sl)] = np.swapaxes(gen_data,0,1) |
||
| 128 | if sl[slice_dirs[idx]].stop == dset.shape[slice_dirs[idx]]: |
||
| 129 | idx += 1 |
||
| 130 | if idx == len(slice_dirs): |
||
| 131 | break |
||
| 132 | tmp = sl[slice_dirs[idx]] |
||
| 133 | sl[slice_dirs[idx]] = slice(tmp.start+1, tmp.stop+1) |
||
| 134 | |||
| 135 | self.exp._barrier() |
||
| 136 | |||
| 137 | try: |
||
| 138 | h5file.close() |
||
| 139 | except IOError as exc: |
||
| 140 | logging.debug('There was a problem trying to close the file in random_hdf5_loader') |
||
| 141 | |||
| 142 | return self.hdf5._open_backing_h5(fname, 'r') |
||
| 143 | |||
| 192 |