@@ -7,6 +7,7 @@
 import inspect
 import itertools
 from types import MethodType
+from typing import List

 import numpy as np

@@ -96,6 +97,33 @@ def filter_attributes(ctx, f, **kwargs):
         _process_exclusion(ctx, cls_attrs, kwargs["exclude"], f)


+def validate_data_types(
+    prohibited_data_types: List[str], reserved_words=["collaborators"], **kwargs
+):
+    """Validates that the types of attributes in kwargs are not among the prohibited data types.
+    Raises a TypeError if any prohibited data type is found.
+
+    Args:
+        prohibited_data_types (List[str]): A list of prohibited data type names
+            (e.g., ['int', 'float']).
+        reserved_words: A list of strings that should be allowed as attribute values, even if 'str'
+            is included in prohibited_data_types.
+        kwargs (dict): Arbitrary keyword arguments representing attribute names and their values.
+
+    Raises:
+        TypeError: If any prohibited data types are found in kwargs.
+        ValueError: If prohibited_data_types is empty.
+    """
+    if not prohibited_data_types:
+        raise ValueError("prohibited_data_types must not be empty.")
+    for attr_name, attr_value in kwargs.items():
+        if type(attr_value).__name__ in prohibited_data_types and attr_value not in reserved_words:
+            raise TypeError(
+                f"The attribute '{attr_name}' = '{attr_value}' has a prohibited value type: "
+                f"{type(attr_value).__name__}"
+            )
+
+
 def _validate_include_exclude(kwargs, cls_attrs):
     """Validates that 'include' and 'exclude' are not both present, and that
     attributes in 'include' or 'exclude' exist in the context.
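
For context, a quick illustration of how the new validate_data_types helper behaves, assuming it is imported from this module; the keyword arguments below are hypothetical and only their value types matter:

validate_data_types(["bool", "str"], batch_size=32, collaborators="collaborators")
# passes: 32 is an int, and the string "collaborators" is allowed via reserved_words

validate_data_types(["bool", "str"], use_cuda=True)
# raises TypeError: 'use_cuda' = 'True' has a prohibited value type: bool

validate_data_types([], learning_rate=0.1)
# raises ValueError: prohibited_data_types must not be empty.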
@@ -152,13 +180,13 @@ def _process_exclusion(ctx, cls_attrs, exclude_list, f):
             delattr(ctx, attr)


-def checkpoint(ctx, parent_func, chkpnt_reserved_words=["next", "runtime"]):
+def checkpoint(ctx, parent_func, checkpoint_reserved_words=["next", "runtime"]):
     """Optionally saves the current state for the task just executed.

     Args:
         ctx (any): The context to checkpoint.
         parent_func (function): The function that was just executed.
-        chkpnt_reserved_words (list, optional): A list of reserved words to
+        checkpoint_reserved_words (list, optional): A list of reserved words to
             exclude from checkpointing. Defaults to ["next", "runtime"].

     Returns:
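
Because the keyword parameter itself is renamed, any call site that passed it by name needs the same update; a hypothetical example, not a call site from this diff:

# old: checkpoint(ctx, parent_func, chkpnt_reserved_words=["next", "runtime"])
checkpoint(ctx, parent_func, checkpoint_reserved_words=["next", "runtime"])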
@@ -173,7 +201,7 @@ def checkpoint(ctx, parent_func, chkpnt_reserved_words=["next", "runtime"]):
     if ctx._checkpoint:
         # all objects will be serialized using Metaflow interface
         print(f"Saving data artifacts for {parent_func.__name__}")
-        artifacts_iter, _ = generate_artifacts(ctx=ctx, reserved_words=chkpnt_reserved_words)
+        artifacts_iter, _ = generate_artifacts(ctx=ctx, reserved_words=checkpoint_reserved_words)
         task_id = ctx._metaflow_interface.create_task(parent_func.__name__)
         ctx._metaflow_interface.save_artifacts(
             artifacts_iter(),
@@ -195,7 +223,7 @@ def old_check_resource_allocation(num_gpus, each_participant_gpu_usage):
     # But at this point the function will raise an error because
     # remaining_gpu_memory is never cleared.
     # The participant list should remove the participant if it fits in the gpu
-    # and save the partipant if it doesn't and continue to the next GPU to see
+    # and save the participant if it doesn't and continue to the next GPU to see
     # if it fits in that one, only if we run out of GPUs should this function
     # raise an error.
     for gpu in np.ones(num_gpus, dtype=int):
@@ -230,7 +258,7 @@ def check_resource_allocation(num_gpus, each_participant_gpu_usage):
             if gpu == 0:
                 break
             if gpu < participant_gpu_usage:
-                # participant doesn't fitm break to next GPU
+                # participant doesn't fit, break to next GPU
                 break
             else:
                 # if participant fits remove from need_assigned
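
The corrected comments describe the intended greedy fitting. A minimal standalone sketch of that behavior, assuming each_participant_gpu_usage maps participant names to the amount of GPU each needs; the function name and the error raised here are illustrative, not the repository's exact implementation:

import numpy as np

def fit_participants_to_gpus(num_gpus, each_participant_gpu_usage):
    # Greedy sketch: walk the GPUs, packing participants until one no longer
    # fits, then move on to the next GPU. Only complain once every GPU has
    # been tried and some participants are still unassigned.
    need_assigned = dict(each_participant_gpu_usage)
    for gpu in np.ones(num_gpus, dtype=int):
        for participant_name, participant_gpu_usage in list(need_assigned.items()):
            if gpu == 0:
                break
            if gpu < participant_gpu_usage:
                # participant doesn't fit, break to next GPU
                break
            # participant fits: assign it and consume this GPU's capacity
            need_assigned.pop(participant_name)
            gpu -= participant_gpu_usage
    if need_assigned:
        raise ValueError(f"Not enough GPU capacity for: {list(need_assigned)}")

# Two GPUs, three participants each needing half a GPU: everything fits, no error.
fit_participants_to_gpus(2, {"alice": 0.5, "bob": 0.5, "carol": 0.5})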