@@ 93-110 (lines=18) @@
|  90 |                 del output_filenames[0:], fluxes[0:], ivars[0:]
|  91 |
|  92 |
|  93 |     if len(output_filenames) > 0:
|  94 |
|  95 |         results, covs, metas = model.fit(fluxes, ivars,
|  96 |             initial_labels=initial_labels, model_redshift=fit_velocity,
|  97 |             full_output=True)
|  98 |
|  99 |         for result, cov, meta, output_filename \
| 100 |                 in zip(results, covs, metas, output_filenames):
| 101 |
| 102 |             for key in delete_meta_keys:
| 103 |                 if key in meta:
| 104 |                     del meta[key]
| 105 |
| 106 |             with open(output_filename, "wb") as fp:
| 107 |                 pickle.dump((result, cov, meta), fp, 2)  # For legacy.
| 108 |             logger.info("Saved output to {}".format(output_filename))
| 109 |
| 110 |         del output_filenames[0:], fluxes[0:], ivars[0:]
| 111 |
| 112 |
| 113 |     logger.info("Number of failures: {}".format(failures))

@@ 73-90 (lines=18) @@
|  70 |             failures += 1
|  71 |
|  72 |         else:
|  73 |             if len(output_filenames) >= chunk_size:
|  74 |
|  75 |                 results, covs, metas = model.fit(fluxes, ivars,
|  76 |                     initial_labels=initial_labels, model_redshift=fit_velocity,
|  77 |                     full_output=True)
|  78 |
|  79 |                 for result, cov, meta, output_filename \
|  80 |                         in zip(results, covs, metas, output_filenames):
|  81 |
|  82 |                     for key in delete_meta_keys:
|  83 |                         if key in meta:
|  84 |                             del meta[key]
|  85 |
|  86 |                     with open(output_filename, "wb") as fp:
|  87 |                         pickle.dump((result, cov, meta), fp, 2)  # For legacy.
|  88 |                     logger.info("Saved output to {}".format(output_filename))
|  89 |
|  90 |                 del output_filenames[0:], fluxes[0:], ivars[0:]
|  91 |
|  92 |
|  93 |     if len(output_filenames) > 0:
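
Both fragments run the same flush-and-save logic; only the trigger differs (`>= chunk_size` inside the loop versus a final `> 0` drain afterwards). A minimal deduplication sketch under that assumption follows; the helper name `_flush_results` is hypothetical, and the surrounding objects (`model`, `logger`, `delete_meta_keys`, and the accumulator lists) are taken to behave exactly as in the fragments above.

import pickle

def _flush_results(model, fluxes, ivars, output_filenames,
                   initial_labels, fit_velocity, delete_meta_keys, logger):
    # Hypothetical helper extracting the duplicated block above.
    # Fit the accumulated chunk in a single call.
    results, covs, metas = model.fit(fluxes, ivars,
        initial_labels=initial_labels, model_redshift=fit_velocity,
        full_output=True)

    for result, cov, meta, output_filename \
            in zip(results, covs, metas, output_filenames):

        # Drop unwanted metadata keys before pickling.
        for key in delete_meta_keys:
            meta.pop(key, None)

        with open(output_filename, "wb") as fp:
            pickle.dump((result, cov, meta), fp, 2)  # Protocol 2, for legacy.
        logger.info("Saved output to {}".format(output_filename))

    # Empty the accumulators in place (slice deletion) so the caller's
    # lists are cleared too, exactly as the original `del list[0:]` does.
    del output_filenames[0:], fluxes[0:], ivars[0:]

Each call site then reduces to its guard: `if len(output_filenames) >= chunk_size: _flush_results(...)` inside the loop, and `if len(output_filenames) > 0: _flush_results(...)` for the final drain, which removes the 18 duplicated lines the report flags.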