@@ 134-190 (lines=57) @@
131 |     print('Successful parameter calculations for {}'.format(iter_name))
132 |
133 |
134 | if __name__ == "__main__":
135 |     description = ('Download input from an S3 bucket and provide that input '
136 |                    'to our function. On return put output in an S3 bucket.')
137 |
138 |     parser = ArgumentParser(description=description)
139 |
140 |     parser.add_argument(
141 |         'bucket', metavar='bucket', type=str,
142 |         help='The S3 bucket for pulling input and pushing output.'
143 |     )
144 |
145 |     parser.add_argument(
146 |         '--starmap', action='store_true',
147 |         help='Assume input has already been grouped into a single tuple.'
148 |     )
149 |
150 |     parser.add_argument(
151 |         '--arrayjob', action='store_true',
152 |         help='If True, this is an array job and it should reference the '
153 |              'AWS_BATCH_JOB_ARRAY_INDEX environment variable.'
154 |     )
155 |
156 |     parser.add_argument(
157 |         '--sse', dest='sse', action='store',
158 |         choices=['AES256', 'aws:kms'], default=None,
159 |         help='Server side encryption algorithm used when storing objects '
160 |              'in S3.'
161 |     )
162 |
163 |     args = parser.parse_args()
164 |
165 |     s3 = boto3.client('s3')
166 |     bucket = args.bucket
167 |
168 |     jobid = os.environ.get("AWS_BATCH_JOB_ID")
169 |
170 |     if args.arrayjob:
171 |         jobid = jobid.split(':')[0]
172 |
173 |     key = '/'.join([
174 |         'cloudknot.jobs',
175 |         os.environ.get("CLOUDKNOT_S3_JOBDEF_KEY"),
176 |         jobid,
177 |         'input.pickle'
178 |     ])
179 |
180 |     response = s3.get_object(Bucket=bucket, Key=key)
181 |     input_ = pickle.loads(response.get('Body').read())
182 |
183 |     if args.arrayjob:
184 |         array_index = int(os.environ.get("AWS_BATCH_JOB_ARRAY_INDEX"))
185 |         input_ = input_[array_index]
186 |
187 |     if args.starmap:
188 |         pickle_to_s3(args.sse, args.arrayjob)(sensitivity_it)(*input_)
189 |     else:
190 |         pickle_to_s3(args.sse, args.arrayjob)(sensitivity_it)(input_)
191 |
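The closing lines hand the unpickled input to sensitivity_it through the pickle_to_s3 decorator factory, which is defined earlier in the full script and falls outside this hunk. As a minimal, non-authoritative sketch of what such a decorator plausibly does -- pickle the wrapped function's return value and upload it alongside the input, honoring the --sse choice -- consider the following. The output key layout and the CLOUDKNOT_JOBS_S3_BUCKET environment variable are assumptions here; the real script can simply close over the module-level bucket set in __main__.

    import os
    import pickle

    import boto3


    def pickle_to_s3(server_side_encryption=None, array_job=True):
        # Sketch only: assumes the output key mirrors the input key built in
        # __main__, and that the bucket name arrives via a hypothetical
        # CLOUDKNOT_JOBS_S3_BUCKET variable rather than the module-level
        # ``bucket`` the real script would see.
        def real_decorator(f):
            def wrapper(*args, **kwargs):
                s3 = boto3.client('s3')
                bucket = os.environ.get("CLOUDKNOT_JOBS_S3_BUCKET")

                jobid = os.environ.get("AWS_BATCH_JOB_ID")
                if array_job:
                    # Array children see "parent-id:index"; keep the parent
                    # id so every child shares one S3 prefix.
                    jobid = jobid.split(':')[0]

                key_parts = [
                    'cloudknot.jobs',
                    os.environ.get("CLOUDKNOT_S3_JOBDEF_KEY"),
                    jobid,
                ]
                if array_job:
                    # Keep each child's result separate by array index.
                    key_parts.append(
                        os.environ.get("AWS_BATCH_JOB_ARRAY_INDEX"))
                key_parts.append('output.pickle')

                result = f(*args, **kwargs)

                # Pass ServerSideEncryption only when --sse was supplied;
                # the parameter must be omitted entirely otherwise.
                extra = {}
                if server_side_encryption is not None:
                    extra['ServerSideEncryption'] = server_side_encryption

                s3.put_object(Bucket=bucket, Key='/'.join(key_parts),
                              Body=pickle.dumps(result), **extra)
                return result
            return wrapper
        return real_decorator

Note how the sketch repeats the jobid.split(':')[0] normalization from the hunk: inside an array job, AWS Batch sets AWS_BATCH_JOB_ID to "parent-id:index" for each child, so stripping the suffix lets every child read the same input.pickle.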
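For context on the other side of the transfer, the entry point above expects a pickled object at cloudknot.jobs/<jobdef-key>/<job-id>/input.pickle and, for an array job, indexes into it with AWS_BATCH_JOB_ARRAY_INDEX. A hypothetical client-side upload for a three-child array job run with --starmap could look like this; the bucket, job-definition key, and job id are made-up values.

    import pickle

    import boto3

    # Three work items for an array job of size 3; with --starmap each tuple
    # is unpacked into the positional arguments of sensitivity_it.
    inputs = [(0.1, 'coarse'), (0.2, 'medium'), (0.3, 'fine')]

    s3 = boto3.client('s3')
    s3.put_object(
        Bucket='my-cloudknot-bucket',  # made-up bucket name
        # Mirrors the key the script builds:
        # cloudknot.jobs/<CLOUDKNOT_S3_JOBDEF_KEY>/<job id>/input.pickle
        Key='cloudknot.jobs/my-jobdef-key/0123-abcd/input.pickle',
        Body=pickle.dumps(inputs),
    )

The container for child i would then be invoked along the lines of "python <script>.py my-cloudknot-bucket --starmap --arrayjob", select inputs[i], and call sensitivity_it(*inputs[i]).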