Skip to content

Commit

Permalink
move metrics_collector_spec back & update helper functions & add return type for helper functions
Browse files Browse the repository at this point in the history

Signed-off-by: helenxie-bit <helenxiehz@gmail.com>
  • Loading branch information
helenxie-bit committed Aug 30, 2024
1 parent c2df967 commit 9f69329
Show file tree
Hide file tree
Showing 2 changed files with 77 additions and 69 deletions.
22 changes: 12 additions & 10 deletions sdk/python/v1beta1/kubeflow/katib/api/katib_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,19 +415,21 @@ class name in this argument.
if max_failed_trial_count is not None:
experiment.spec.max_failed_trial_count = max_failed_trial_count

# Add metrics collector to the Katib Experiment.
# Up to now, we only support parameter `kind`, of which default value
# is `StdOut`, to specify the kind of metrics collector.
experiment.spec.metrics_collector_spec = models.V1beta1MetricsCollectorSpec(
collector=models.V1beta1CollectorSpec(kind=metrics_collector_config["kind"])
)

# If users choose to use a custom objective function.
if objective is not None:
# Add metrics collector to the Katib Experiment.
# Up to now, we only support parameter `kind`, of which default value
# is `StdOut`, to specify the kind of metrics collector.
experiment.spec.metrics_collector_spec = models.V1beta1MetricsCollectorSpec(
collector=models.V1beta1CollectorSpec(
kind=metrics_collector_config["kind"]
)
)

# Iterate over input parameters and do substitutions.
experiment_params = []
trial_params = []
input_params = utils.parameter_substitution(
input_params = utils.get_trial_substitutions_from_dict(
parameters, experiment_params, trial_params
)

Expand Down Expand Up @@ -587,10 +589,10 @@ class name in this argument.
# Iterate over input parameters and do substitutions.
experiment_params = []
trial_params = []
training_args = utils.parameter_substitution(
training_args = utils.get_trial_substitutions_from_trainer(
trainer_parameters.training_parameters, experiment_params, trial_params
)
lora_config = utils.parameter_substitution(
lora_config = utils.get_trial_substitutions_from_trainer(
trainer_parameters.lora_config, experiment_params, trial_params
)

Expand Down
124 changes: 65 additions & 59 deletions sdk/python/v1beta1/kubeflow/katib/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,68 +143,74 @@ def default(self, obj):
return json.JSONEncoder.default(self, obj)


def get_trial_substitutions_from_dict(
    parameters: Dict[str, Any],
    experiment_params: List[models.V1beta1ParameterSpec],
    trial_params: List[models.V1beta1TrialParameterSpec],
) -> Dict[str, str]:
    """Substitute Katib search-space samples in a plain dict of parameters.

    Every value that is a ``V1beta1ParameterSpec`` (a Katib Experiment
    parameter sample) is replaced with the ``${trialParameters.<name>}``
    placeholder string, and the sample is recorded in ``experiment_params``
    and ``trial_params`` — both lists are mutated in place as a side effect.
    All other values are left unchanged.

    Args:
        parameters: Mapping of parameter name to either a concrete value or a
            ``V1beta1ParameterSpec`` sample. Mutated in place.
        experiment_params: Accumulator for the Experiment's parameter specs.
        trial_params: Accumulator for the Experiment's Trial parameter specs.

    Returns:
        The same ``parameters`` dict, updated in place, with every sampled
        parameter replaced by its trial-parameter placeholder.
    """
    for p_name, p_value in parameters.items():
        # If input parameter value is a Katib Experiment parameter sample.
        if isinstance(p_value, models.V1beta1ParameterSpec):
            # Wrap the value so the trial substitutes it at runtime.
            # NOTE: replacing the value of an existing key while iterating
            # .items() is safe — the dict's size never changes.
            parameters[p_name] = f"${{trialParameters.{p_name}}}"

            # Register the sample under its parameter name in the
            # Experiment's search space.
            p_value.name = p_name
            experiment_params.append(p_value)

            # Register the matching Trial parameter referencing the sample.
            trial_params.append(
                models.V1beta1TrialParameterSpec(name=p_name, reference=p_name)
            )
        else:
            # Otherwise, keep the concrete value for the function input.
            parameters[p_name] = p_value

    return parameters

def get_trial_substitutions_from_trainer(
    parameters: Union["TrainingArguments", "LoraConfig"],  # noqa: F821
    experiment_params: List[models.V1beta1ParameterSpec],
    trial_params: List[models.V1beta1TrialParameterSpec],
) -> str:
    """Substitute Katib search-space samples in a trainer config object.

    Works like ``get_trial_substitutions_from_dict`` but for HuggingFace
    ``TrainingArguments`` / peft ``LoraConfig`` objects: every attribute whose
    value is a ``V1beta1ParameterSpec`` is replaced (via ``setattr``) with the
    ``${trialParameters.<name>}`` placeholder and recorded in
    ``experiment_params``/``trial_params`` (both mutated in place).

    Args:
        parameters: Trainer configuration object; mutated in place.
        experiment_params: Accumulator for the Experiment's parameter specs.
        trial_params: Accumulator for the Experiment's Trial parameter specs.

    Returns:
        The updated configuration serialized as a JSON string.
        (Bug fix: the annotation previously said ``Dict[str, str]``, but both
        exit paths return ``json.dumps(...)``, i.e. ``str``.)
    """
    # Imported lazily so the base SDK does not require these heavy packages.
    from peft import LoraConfig  # noqa: F401
    from transformers import TrainingArguments  # noqa: F401

    if isinstance(parameters, TrainingArguments):
        parameters_dict = parameters.to_dict()
    else:
        parameters_dict = parameters.__dict__

    for p_name, p_value in parameters_dict.items():
        # to_dict() may expose derived keys that are not settable attributes;
        # skip those. (For the __dict__ path every key is an attribute.)
        if not hasattr(parameters, p_name):
            logger.warning(f"Training parameter {p_name} is not supported.")
            continue

        if isinstance(p_value, models.V1beta1ParameterSpec):
            old_attr = getattr(parameters, p_name, None)
            if old_attr is not None:
                # Replace the sampled attribute with its trial placeholder.
                value = f"${{trialParameters.{p_name}}}"
                setattr(parameters, p_name, value)
            # Register the sample in the Experiment search space and the
            # matching Trial parameter reference.
            p_value.name = p_name
            experiment_params.append(p_value)
            trial_params.append(
                models.V1beta1TrialParameterSpec(name=p_name, reference=p_name)
            )
        elif p_value is not None:
            old_attr = getattr(parameters, p_name, None)
            if old_attr is not None:
                if isinstance(p_value, dict):
                    # Update the existing dictionary without nesting.
                    value = copy.deepcopy(p_value)
                else:
                    # Coerce to the attribute's current type (e.g. int, str)
                    # so serialized/string inputs round-trip correctly.
                    value = type(old_attr)(p_value)
                setattr(parameters, p_name, value)

    # Serialize the (now placeholder-bearing) config for the trial template.
    # SetEncoder handles set-valued fields that plain json.dumps rejects.
    if isinstance(parameters, TrainingArguments):
        parameters = json.dumps(parameters.to_dict())
    else:
        parameters = json.dumps(parameters.__dict__, cls=SetEncoder)

    return parameters

Expand All @@ -214,7 +220,7 @@ def get_exec_script_from_objective(
input_params: Dict[str, Any] = None,
packages_to_install: Optional[List[str]] = None,
pip_index_url: str = "https://pypi.org/simple",
):
) -> str:
"""
Get executable script for container args from the given objective function and parameters.
"""
Expand Down

0 comments on commit 9f69329

Please sign in to comment.