Merge pull request #329 from 4dn-dcic/fill_default
Fill default
SooLee authored Apr 1, 2021
2 parents c2561ee + dd14e27, commit 9c64c0e
Showing 4 changed files with 31 additions and 20 deletions.
tests/awsf3/postrunjson/GBPtlqb2rFGH.postrun.json (3 changes: 2 additions & 1 deletion)
@@ -144,6 +144,7 @@
"shutdown_min": "now",
"spot_duration": "",
"spot_instance": true,
"subnet": ""
"subnet": "",
"use_benchmark": false
}
}
tests/tibanna/unicorn/test_ec2_utils.py (17 changes: 9 additions & 8 deletions)
@@ -243,7 +243,7 @@ def test_execution_mem_cpu():
assert 'args' in unicorn_dict
assert 'config' in unicorn_dict
assert 'instance_type' in unicorn_dict['config']
-assert unicorn_dict['config']['instance_type'] == 't3.micro'
+assert unicorn_dict['config']['instance_type'] == 't3.small'


def test_execution_benchmark():
@@ -265,7 +265,7 @@ def test_execution_benchmark():
assert 'config' in unicorn_dict
assert 'instance_type' in unicorn_dict['config']
assert unicorn_dict['config']['instance_type'] == 't3.micro'
-assert unicorn_dict['config']['ebs_size'] == 10
+assert unicorn_dict['config']['ebs_size'] == 15
# cleanup afterwards
s3.delete_objects(Bucket='tibanna-output',
Delete={'Objects': [{'Key': randomstr}]})
@@ -397,7 +397,7 @@ def test_update_config_ebs_size():
execution = Execution(input_dict)
execution.input_size_in_bytes = execution.get_input_size_in_bytes()
execution.update_config_ebs_size()
-assert execution.cfg.ebs_size == 10
+assert execution.cfg.ebs_size == 15
# cleanup afterwards
s3.delete_objects(Bucket='tibanna-output',
Delete={'Objects': [{'Key': randomstr}]})
@@ -415,7 +415,8 @@ def test_update_config_ebs_size2():
'app_name': 'md5',
'cwl_main_filename': 'md5.cwl',
'cwl_directory_url': 'someurl'},
-'config': {'log_bucket': 'tibanna-output', 'ebs_size': '5000000000x'}}
+'config': {'log_bucket': 'tibanna-output', 'ebs_size': '5000000000x',
+'ebs_size_as_is': True}}
execution = Execution(input_dict)
execution.input_size_in_bytes = execution.get_input_size_in_bytes()
execution.update_config_ebs_size()
@@ -629,7 +630,7 @@ def test_launch_args():
launch_args = execution.launch_args
print(launch_args)
assert launch_args
-assert 't3.micro' in str(launch_args)
+assert 't3.small' in str(launch_args)
assert 'InstanceMarketOptions' in str(launch_args)


@@ -694,7 +695,7 @@ def test_ec2_exception_coordinator4():
'cwl_main_filename': 'md5.cwl',
'cwl_directory_url': 'someurl'},
'config': {'log_bucket': log_bucket, 'mem': 1, 'cpu': 1,
-'spot_instance': True,
+'spot_instance': True, 'mem_as_is': True,
'behavior_on_capacity_limit': 'other_instance_types'},
'jobid': jobid}
execution = Execution(input_dict, dryrun=True)
@@ -779,7 +780,7 @@ def test_ec2_exception_coordinator8():
'cwl_main_filename': 'md5.cwl',
'cwl_directory_url': 'someurl'},
'config': {'log_bucket': log_bucket, 'instance_type': 't2.micro',
-'mem': 1, 'cpu': 1,
+'mem': 1, 'cpu': 1, 'mem_as_is': True,
'behavior_on_capacity_limit': 'other_instance_types'},
'jobid': jobid}
execution = Execution(input_dict, dryrun=True)
@@ -803,7 +804,7 @@ def test_ec2_exception_coordinator9():
'cwl_main_filename': 'md5.cwl',
'cwl_directory_url': 'someurl'},
'config': {'log_bucket': log_bucket,
-'mem': 2, 'cpu': 1,
+'mem': 2, 'cpu': 1, 'mem_as_is': True,
'behavior_on_capacity_limit': 'other_instance_types'},
'jobid': jobid}
execution = Execution(input_dict, dryrun=True)
Expand Down
tibanna/_version.py (2 changes: 1 addition & 1 deletion)
@@ -1,4 +1,4 @@
"""Version information."""

# The following line *must* be the last in the module, exactly as formatted:
__version__ = "1.1.3"
__version__ = "1.2.0b2"
tibanna/ec2_utils.py (29 changes: 19 additions & 10 deletions)
@@ -79,13 +79,14 @@ def auto_fill(self):
"""
args = self.args
cfg = self.cfg
+args.fill_default()
cfg.fill_language_options(args.language, getattr(args, 'singularity', False))
cfg.fill_other_fields(args.app_name)
# sanity check
if args.app_name and args.app_name in B.app_name_function_map:
pass # use benchmarking
else:
-if not cfg.ebs_size: # unset (set to 0)
+if not cfg.ebs_size or cfg.ebs_size < 10: # unset (set to 0) or <10GB
cfg.ebs_size = 10 # if not set by user or benchmark, just use 10GB as default
if not cfg.EBS_optimized: # either false or unset
cfg.EBS_optimized = False # False by default so t2 instances can be used
@@ -118,8 +119,8 @@ def __init__(self, fill_default=True, **kwargs):
for field in ['output_S3_bucket']:
if not hasattr(self, field):
raise MissingFieldInInputJsonException("field %s is required in args" % field)
-if fill_default:
-    self.fill_default()
+#if fill_default:
+# self.fill_default()

def update(self, d):
for k, v in d.items():
@@ -300,6 +301,10 @@ def fill_default(self):
self.root_ebs_size = DEFAULT_ROOT_EBS_SIZE
if not hasattr(self, 'awsf_image'):
self.awsf_image = DEFAULT_AWSF_IMAGE
+if not hasattr(self, 'mem_as_is'): # if false, add 1GB overhead
+    self.mem_as_is = False
+if not hasattr(self, 'ebs_size_as_is'): # if false, add 5GB overhead
+    self.ebs_size_as_is = False

def fill_internal(self):
# fill internally-used fields (users cannot specify these fields)
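Both new flags default to False, so the padding described in their comments applies unless a user opts out in the input config. A minimal sketch of a unicorn config that opts out of both, modeled on the test inputs above (not a complete input JSON; the bucket name and sizes are placeholder values):

config = {
    'log_bucket': 'my-log-bucket',  # placeholder bucket name
    'mem': 4,                       # GB requested by the user
    'cpu': 2,
    'mem_as_is': True,              # match instances against 4GB as-is, no 1GB padding
    'ebs_size': '3x',               # three times the total input size
    'ebs_size_as_is': True,         # skip the 5GB docker-image allowance added in update_config_ebs_size below
}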
@@ -372,7 +377,11 @@ def init_instance_type_list(self):
'EBS_optimized': False})
# user specified mem and cpu
if self.cfg.mem and self.cfg.cpu:
-list0 = get_instance_types(self.cfg.cpu, self.cfg.mem, instance_list(exclude_t=False))
+if self.cfg.mem_as_is:
+    mem = self.cfg.mem
+else:
+    mem = self.cfg.mem + 1
+list0 = get_instance_types(self.cfg.cpu, mem, instance_list(exclude_t=False))
nonredundant_list = [i for i in list0 if i['instance_type'] != instance_type]
instance_type_dlist.extend(nonredundant_list)
# user specifically wanted EBS_optimized instances
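This padding is why the tests above now expect 't3.small' where they used to expect 't3.micro': with the default mem_as_is=False, a request for mem=1, cpu=1 is matched against instance types with at least 2GB of memory. A standalone sketch of the effect (not tibanna's own code; the instance table is made up for illustration):

def effective_mem(mem_gb, mem_as_is=False):
    # memory actually used for instance-type matching
    return mem_gb if mem_as_is else mem_gb + 1

INSTANCES = {'t3.micro': {'cpu': 2, 'mem': 1}, 't3.small': {'cpu': 2, 'mem': 2}}

def matching_instances(cpu, mem_gb, mem_as_is=False):
    need = effective_mem(mem_gb, mem_as_is)
    return [name for name, spec in INSTANCES.items()
            if spec['cpu'] >= cpu and spec['mem'] >= need]

print(matching_instances(1, 1))                  # ['t3.small']
print(matching_instances(1, 1, mem_as_is=True))  # ['t3.micro', 't3.small']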
@@ -420,7 +429,7 @@ def total_input_size_in_gb(self):
except:
return None

-def auto_calculate_ebs_size(self):
+def update_config_ebs_size(self):
"""if ebs_size is in the format of e.g. '3x', it updates the size
to be total input size times three. If the value is lower than 10GB,
keep 10GB"""
@@ -435,13 +444,13 @@
self.cfg.ebs_size = round(self.cfg.ebs_size) + 1
else:
self.cfg.ebs_size = round(self.cfg.ebs_size)
-if self.cfg.ebs_size < 10:
-    self.cfg.ebs_size = 10
-
-def update_config_ebs_size(self):
-    self.auto_calculate_ebs_size() # if in the format of '3x'
if not self.user_specified_ebs_size: # use benchmark only if not set by user
self.cfg.ebs_size = self.benchmark['ebs_size']
+if not self.cfg.ebs_size_as_is:
+    self.cfg.ebs_size += 5 # account for docker image size
+if self.cfg.ebs_size < 10:
+    self.cfg.ebs_size = 10


def get_input_size_in_bytes(self):
input_size_in_bytes = dict()
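Taken together, the renamed update_config_ebs_size resolves the requested or benchmark-derived size, adds a 5GB allowance for the docker image unless ebs_size_as_is is set, and enforces the 10GB floor last, which is why the tests above now expect 15 instead of 10. A standalone sketch of that final step (not the tibanna code itself; it assumes the size has already been resolved to a whole number of GB):

def final_ebs_size(size_gb, ebs_size_as_is=False):
    if not ebs_size_as_is:
        size_gb += 5          # docker-image allowance
    return max(size_gb, 10)   # 10GB floor

print(final_ebs_size(10))                       # 15, matching the updated test expectations
print(final_ebs_size(10, ebs_size_as_is=True))  # 10
print(final_ebs_size(2))                        # 10 (2 + 5 = 7, bumped up by the floor)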
