enpi_api.l2.client.api.ml_api
from typing import Literal

from loguru import logger

from enpi_api.l1 import openapi_client
from enpi_api.l2.client.api.file_api import FileApi
from enpi_api.l2.events.workflow_execution_task_waitable import WorkflowExecutionTaskWaitable
from enpi_api.l2.events.workflow_execution_waitable import WorkflowExecutionWaitable
from enpi_api.l2.types.api_error import ApiError, ApiErrorContext
from enpi_api.l2.types.clone import CloneId
from enpi_api.l2.types.collection import CollectionId
from enpi_api.l2.types.execution import Execution
from enpi_api.l2.types.file import File, FileId
from enpi_api.l2.types.log import LogLevel
from enpi_api.l2.types.ml import (
    MlAwsEndpointConfig,
    MlEndpoint,
    MlEndpointId,
    MlEndpointSignature,
    MlflowModelUri,
    MlInput,
    MlInvocationId,
    MlInvocationIntentConfig,
    MlInvocationStats,
    MlOutputIntent,
    MlParam,
)
from enpi_api.l2.types.task import TaskState
from enpi_api.l2.types.workflow import WorkflowExecutionId, WorkflowExecutionTaskId, WorkflowTaskTemplateName


class InvocationFailed(Exception):
    """Indicates that the ML invocation has failed."""

    def __init__(self, invocation_id: MlInvocationId):
        """@private"""
        super().__init__(f"ML invocation with ID `{invocation_id}` failed")


class DeploymentFailed(Exception):
    """Indicates that the ML deployment has failed."""

    def __init__(self, workflow_execution_task_id: WorkflowExecutionTaskId):
        """@private"""
        super().__init__(f"ML deployment task with ID `{workflow_execution_task_id}` failed")


class MlApi:
    _inner_api_client: openapi_client.ApiClient
    _log_level: LogLevel

    def __init__(self, inner_api_client: openapi_client.ApiClient, log_level: LogLevel):
        """@private"""
        self._inner_api_client = inner_api_client
        self._log_level = log_level

    def get_ml_endpoints(self) -> list[MlEndpoint]:
        """Get all ML endpoints.

        Returns:
            list[enpi_api.l2.types.ml.MlEndpoint]: A list of ML endpoints.
        """

        ml_api_instance = openapi_client.MlApi(self._inner_api_client)
        try:
            return [MlEndpoint.from_raw(i) for i in ml_api_instance.get_ml_endpoints().ml_endpoints]
        except openapi_client.ApiException as e:
            raise ApiError(e)

    def get_ml_invocation_stats(self) -> list[MlInvocationStats]:
        """Get ML invocation statistics.

        Returns:
            list[enpi_api.l2.types.ml.MlInvocationStats]: A list of ML invocation statistics.
        """
        ml_api_instance = openapi_client.MlApi(self._inner_api_client)
        try:
            stats = ml_api_instance.get_ml_invocation_stats().stats
            return [MlInvocationStats.from_raw(i) for i in stats]
        except openapi_client.ApiException as e:
            raise ApiError(e)

    def register_ml_endpoint(
        self,
        display_name: str,
        input_mapping: list[MlInput],
        output_intents: list[MlOutputIntent],
        vendor_config: MlAwsEndpointConfig,
        signatures: list[MlEndpointSignature],
        parameter_mapping: list[MlParam] | None = None,
    ) -> MlEndpointId:
        """Register an ML endpoint.

        Args:
            display_name (str): The display name of the ML endpoint.
            input_mapping (list[MlInput]): The input mapping of the ML endpoint.
            output_intents (list[MlOutputIntent]): The output intents of the ML endpoint.
            vendor_config (MlAwsEndpointConfig): The AWS endpoint configuration of the ML endpoint.
            signatures (list[MlEndpointSignature]): The signatures of the ML endpoint.
            parameter_mapping (list[MlParam] | None): The parameter mapping of the ML endpoint.

        Returns:
            endpoint_id (MlEndpointId): The unique identifier of the ML endpoint.
        """

        ml_api_instance = openapi_client.MlApi(self._inner_api_client)
        try:
            parameter_mapping_items = (
                [
                    openapi_client.MlParameterMapItem.model_validate(
                        {
                            "type": pm["type"],
                            "input_key": pm["input_key"],
                            "label": pm["label"] if "label" in pm and pm["label"] is not None else None,
                            "source": "parameter",
                        }
                    )
                    for pm in parameter_mapping
                ]
                if parameter_mapping is not None
                else None
            )

            vendor_config_to_register = openapi_client.MlAwsEndpointConfig.from_dict(
                {**vendor_config, "region": vendor_config.get("region", "eu-west-1"), "endpoint_type": "external"}
            )
            assert vendor_config_to_register is not None

            result = ml_api_instance.register_ml_endpoint(
                register_ml_endpoint_request=openapi_client.RegisterMlEndpointRequest(
                    display_name=display_name,
                    input_mapping=[openapi_client.MlInput.model_validate(i) for i in input_mapping],
                    parameter_mapping=parameter_mapping_items,
                    output_intents=[openapi_client.MlOutputIntent.from_dict(dict(i)) for i in output_intents],
                    vendor_config=vendor_config_to_register,
                    signatures=[openapi_client.MlEndpointSignature.model_validate(i) for i in signatures],
                )
            )
            return MlEndpointId(result.endpoint_id)
        except openapi_client.ApiException as e:
            raise ApiError(e)

    def unregister_ml_endpoint(self, endpoint_id: MlEndpointId) -> None:
        """Unregister an ML endpoint.

        Args:
            endpoint_id (MlEndpointId): The unique identifier of the ML endpoint to unregister.
        """

        ml_api_instance = openapi_client.MlApi(self._inner_api_client)
        try:
            ml_api_instance.unregister_ml_endpoint(id=endpoint_id)
        except openapi_client.ApiException as e:
            raise ApiError(e)

    def deploy_model(
        self,
        display_name: str,
        input_mapping: list[MlInput],
        output_intents: list[MlOutputIntent],
        model_uri: MlflowModelUri,
        signatures: list[MlEndpointSignature],
        parameter_mapping: list[MlParam] | None = None,
        base_image: Literal["cpu"] | Literal["gpu"] = "cpu",
    ) -> Execution[MlEndpointId]:
        """Deploy a model from MLflow as a SageMaker model with an endpoint.

        Args:
            display_name (str): The display name of the ML endpoint.
            input_mapping (list[MlInput]): The input mapping of the ML endpoint.
            output_intents (list[MlOutputIntent]): The output intents of the ML endpoint.
            model_uri (MlflowModelUri): The URI of an MLflow model.
            signatures (list[MlEndpointSignature]): The signatures of the ML endpoint.
            parameter_mapping (list[MlParam] | None): The parameter mapping of the ML endpoint.
            base_image (Literal["cpu"] | Literal["gpu"]): Which base serving image to deploy on. Defaults to "cpu".

        Returns:
            Execution[MlEndpointId]: An execution that resolves to the unique identifier of the deployed ML endpoint.
        """
        ml_api_instance = openapi_client.MlApi(self._inner_api_client)
        parameter_mapping_items = (
            [
                openapi_client.MlParameterMapItem.model_validate(
                    {
                        "type": pm["type"],
                        "input_key": pm["input_key"],
                        "label": pm["label"] if "label" in pm and pm["label"] is not None else None,
                        "source": "parameter",
                        "enum_options": pm.get("enum_options"),
                    }
                )
                for pm in parameter_mapping
            ]
            if parameter_mapping is not None
            else None
        )

        payload = openapi_client.DeployModelRequest(
            display_name=display_name,
            parameter_mapping=parameter_mapping_items,
            input_mapping=[openapi_client.MlInput.model_validate(i) for i in input_mapping],
            output_intents=[openapi_client.MlOutputIntent.from_dict(dict(i)) for i in output_intents],
            model_uri=model_uri,
            signatures=[openapi_client.MlEndpointSignature.model_validate(i) for i in signatures],
            base_image=base_image,
        )

        with ApiErrorContext():
            deploy_model_response = ml_api_instance.deploy_model(deploy_model_request=payload)
            assert deploy_model_response.workflow_execution_id is not None

        workflow_execution_id = WorkflowExecutionId(int(deploy_model_response.workflow_execution_id))

        def on_complete(task_id: WorkflowExecutionTaskId, task_state: TaskState) -> MlEndpointId:
            # If the task has succeeded, return the endpoint_id
            match task_state:
                case TaskState.SUCCEEDED:
                    result = ml_api_instance.get_endpoint_by_workflow_execution_task_id(workflow_execution_task_id=task_id)
                    return MlEndpointId(result.endpoint_id)
                case _:
                    raise DeploymentFailed(task_id)

        waitable = WorkflowExecutionTaskWaitable[MlEndpointId](
            workflow_execution_id=workflow_execution_id,
            task_template_name=WorkflowTaskTemplateName.ENPI_APP_ML_MONITOR_INFERENCE_SERVICE,
            on_complete=on_complete,
        )

        return Execution(wait=waitable.wait_and_return_result, check_execution_state=waitable.check_execution_state)

    def undeploy_model(self, endpoint_id: MlEndpointId) -> None:
        """Remove a SageMaker model and endpoint.

        Args:
            endpoint_id (MlEndpointId): The unique identifier of the ML endpoint to undeploy.
        """
        ml_api_instance = openapi_client.MlApi(self._inner_api_client)
        try:
            ml_api_instance.undeploy_model(id=endpoint_id)
        except openapi_client.ApiException as e:
            raise ApiError(e)

    def invoke_endpoint(
        self,
        endpoint_id: MlEndpointId,
        clone_ids: list[CloneId] | None = None,
        collection_ids: list[CollectionId] | None = None,
        parameters: dict[str, str | int | float | bool | list[str]] | None = None,
        intent_config: MlInvocationIntentConfig | None = None,
    ) -> Execution[list[File]]:
        """Invoke an ML endpoint.

        Args:
            endpoint_id (MlEndpointId): The unique identifier of an ML endpoint.
            clone_ids (list[CloneId] | None): The unique identifiers of the clones to run the invocation on. Mutually exclusive with `collection_ids`.
            collection_ids (list[CollectionId] | None): The unique identifiers of the collections to run the invocation on. Mutually exclusive with `clone_ids`.
            parameters (dict | None): Parameters passed to the invocation.
            intent_config (MlInvocationIntentConfig | None): Optional configuration for the invocation's output intents.

        Returns:
            output_files (list[File]): The list of output files generated by the File Intent of the ML model.
        """
        ml_api_instance = openapi_client.MlApi(self._inner_api_client)
        file_api = FileApi(self._inner_api_client, self._log_level)
        if clone_ids is not None and collection_ids is not None:
            raise ValueError("Only one of clone_ids or collection_ids may be provided, not both")

        with ApiErrorContext():
            invoke_ml_endpoint_request = openapi_client.InvokeMlEndpointRequest.from_dict(
                dict(
                    clone_ids=clone_ids,
                    collection_ids=collection_ids,
                    parameters=parameters,
                    intent_config=intent_config.model_dump() if intent_config else None,
                )
            )
            assert invoke_ml_endpoint_request is not None
            result = ml_api_instance.invoke_endpoint(id=endpoint_id, invoke_ml_endpoint_request=invoke_ml_endpoint_request)
            assert result.workflow_execution_id is not None

        workflow_execution_id = WorkflowExecutionId(result.workflow_execution_id)

        def on_complete(workflow_execution_id: WorkflowExecutionId, execution_state: TaskState) -> list[File]:
            assert execution_state == TaskState.SUCCEEDED, (
                f"Workflow execution {workflow_execution_id} did not reach {TaskState.SUCCEEDED} state, got {execution_state} state instead"
            )

            logger.success(f"ML invocation with workflow execution ID: {workflow_execution_id} has successfully finished.")

            # Get potential File Intent output files
            file_intent_output_ids = ml_api_instance.get_invocation_output(workflow_execution_id).file_ids
            if len(file_intent_output_ids) > 0:
                logger.success("ML invocation File Intent output files retrieved successfully")
                return [file_api.get_file_by_id(FileId(file_id)) for file_id in file_intent_output_ids]
            else:
                return []

        waitable = WorkflowExecutionWaitable(
            workflow_execution_id=workflow_execution_id,
            on_complete=on_complete,
        )
        return Execution(wait=waitable.wait_and_return_result, check_execution_state=waitable.check_execution_state)
class InvocationFailed(builtins.Exception):
Indicates that the ML invocation has failed.
class DeploymentFailed(builtins.Exception):
Indicates that the ML deployment has failed.
class MlApi:
def get_ml_endpoints(self) -> list[enpi_api.l2.types.ml.MlEndpoint]:

Get all ML endpoints.

Returns:
list[enpi_api.l2.types.ml.MlEndpoint]: A list of ML endpoints.
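A minimal usage sketch, assuming an MlApi instance is already in hand as ml_api (the constructor is @private, so it would normally be obtained from the SDK's higher-level client rather than built directly):

# Assumption: `ml_api` is an MlApi instance provided by the surrounding client.
for endpoint in ml_api.get_ml_endpoints():
    print(endpoint)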
def get_ml_invocation_stats(self) -> list[enpi_api.l2.types.ml.MlInvocationStats]:
Get ML invocation statistics.
Returns:
list[enpi_api.l2.types.ml.MlInvocationStats]: A list of ML invocation statistics.
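The statistics call follows the same pattern; a short sketch under the same ml_api assumption (the exact fields of MlInvocationStats are defined in enpi_api.l2.types.ml and not shown in this module):

stats = ml_api.get_ml_invocation_stats()
print(f"{len(stats)} invocation stat entries")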
def register_ml_endpoint(
    self,
    display_name: str,
    input_mapping: list[enpi_api.l2.types.ml.MlInput],
    output_intents: list[enpi_api.l2.types.ml.MlClusterIntent | enpi_api.l2.types.ml.MlFileIntent | enpi_api.l2.types.ml.MlLiabilityIntent | enpi_api.l2.types.ml.MlMetadataIntent | enpi_api.l2.types.ml.MlNewSequenceIntent | enpi_api.l2.types.ml.MlStructureIntent],
    vendor_config: enpi_api.l2.types.ml.MlAwsEndpointConfig,
    signatures: list[enpi_api.l2.types.ml.MlEndpointSignature],
    parameter_mapping: list[enpi_api.l2.types.ml.MlBaseParam | enpi_api.l2.types.ml.MlEnumParam] | None = None,
) -> enpi_api.l2.types.ml.MlEndpointId:
Register an ML endpoint.
Arguments:
- display_name (str): The display name of the ML endpoint.
- input_mapping (list[MlInput]): The input mapping of the ML endpoint.
- output_intents (list[MlOutputIntent]): The output intents of the ML endpoint.
- vendor_config (MlAwsEndpointConfig): The AWS endpoint configuration of the ML endpoint.
- signatures (list[MlEndpointSignature]): The signatures of the ML endpoint.
- parameter_mapping (list[MlParam] | None): The parameter mapping of the ML endpoint.
Returns:
endpoint_id (MlEndpointId): The unique identifier of the ML endpoint.
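A hedged sketch of a registration call. The inputs, intents, aws_config, and sigs variables are hypothetical and assumed to be built according to the types in enpi_api.l2.types.ml (their exact shapes are not shown in this module); the parameter_mapping keys below are the ones this method reads, with illustrative values.

# `inputs`, `intents`, `aws_config`, and `sigs` are hypothetical placeholders.
endpoint_id = ml_api.register_ml_endpoint(
    display_name="my-external-endpoint",
    input_mapping=inputs,        # list[MlInput]
    output_intents=intents,      # list[MlOutputIntent]
    vendor_config=aws_config,    # MlAwsEndpointConfig; "region" falls back to "eu-west-1"
    signatures=sigs,             # list[MlEndpointSignature]
    parameter_mapping=[
        # Keys as read by this method; the values are illustrative only.
        {"type": "string", "input_key": "threshold", "label": "Threshold"},
    ],
)

# The endpoint can be removed again later:
ml_api.unregister_ml_endpoint(endpoint_id)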
def unregister_ml_endpoint(self, endpoint_id: enpi_api.l2.types.ml.MlEndpointId) -> None:
Unregister an ML endpoint.
Arguments:
- endpoint_id (MlEndpointId): The unique identifier of the ML endpoint to unregister.
def deploy_model(
    self,
    display_name: str,
    input_mapping: list[enpi_api.l2.types.ml.MlInput],
    output_intents: list[enpi_api.l2.types.ml.MlClusterIntent | enpi_api.l2.types.ml.MlFileIntent | enpi_api.l2.types.ml.MlLiabilityIntent | enpi_api.l2.types.ml.MlMetadataIntent | enpi_api.l2.types.ml.MlNewSequenceIntent | enpi_api.l2.types.ml.MlStructureIntent],
    model_uri: enpi_api.l2.types.ml.MlflowModelUri,
    signatures: list[enpi_api.l2.types.ml.MlEndpointSignature],
    parameter_mapping: list[enpi_api.l2.types.ml.MlBaseParam | enpi_api.l2.types.ml.MlEnumParam] | None = None,
    base_image: Literal['cpu', 'gpu'] = 'cpu',
) -> enpi_api.l2.types.execution.Execution[enpi_api.l2.types.ml.MlEndpointId]:
Deploy a model from MLflow as a SageMaker model with an endpoint.
Arguments:
- display_name (str): The display name of the ML endpoint.
- input_mapping (list[MlInput]): The input mapping of the ML endpoint.
- output_intents (list[MlOutputIntent]): The output intents of the ML endpoint.
- model_uri (MlflowModelUri): The URI of an MLflow model.
- signatures (list[MlEndpointSignature]): The signatures of the ML endpoint.
- parameter_mapping (list[MlParam] | None): The parameter mapping of the ML endpoint.
- base_image ("cpu" | "gpu"): Which base serving image to deploy on. Defaults to "cpu".

Returns:
Execution[MlEndpointId]: An execution that resolves to the unique identifier of the deployed ML endpoint.
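A sketch of the deploy-and-wait flow. It relies only on what this module shows: deploy_model returns an Execution built from a wait callable that yields the MlEndpointId, and DeploymentFailed is raised when the monitoring task does not succeed. The models:/ URI is a placeholder, and inputs, intents, and sigs are the same hypothetical values as in the registration sketch.

# Deploy an MLflow model and block until the inference service is up.
execution = ml_api.deploy_model(
    display_name="my-mlflow-model",
    input_mapping=inputs,
    output_intents=intents,
    model_uri="models:/my-model/1",  # placeholder MLflow model URI
    signatures=sigs,
    base_image="gpu",                # deploy on the GPU base image
)
try:
    endpoint_id = execution.wait()   # blocks until the monitor task completes
except DeploymentFailed as e:
    print(e)
else:
    print(f"Deployed endpoint {endpoint_id}")
    # Tear the SageMaker model and endpoint down again when done:
    ml_api.undeploy_model(endpoint_id)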
def undeploy_model(self, endpoint_id: enpi_api.l2.types.ml.MlEndpointId) -> None:
Remove a SageMaker model and endpoint.
Arguments:
- endpoint_id (MlEndpointId): The unique identifier of the ML endpoint to undeploy.
def invoke_endpoint(
    self,
    endpoint_id: enpi_api.l2.types.ml.MlEndpointId,
    clone_ids: list[enpi_api.l2.types.clone.CloneId] | None = None,
    collection_ids: list[enpi_api.l2.types.collection.CollectionId] | None = None,
    parameters: dict[str, str | int | float | bool | list[str]] | None = None,
    intent_config: enpi_api.l2.types.ml.MlInvocationIntentConfig | None = None,
) -> enpi_api.l2.types.execution.Execution[list[File]]:
Invoke an ML endpoint.
Arguments:
- endpoint_id (MlEndpointId): The unique identifier of an ML endpoint.
- clone_ids (list[CloneId] | None): The unique identifiers of the clones to run the invocation on. Mutually exclusive with collection_ids.
- collection_ids (list[CollectionId] | None): The unique identifiers of the collections to run the invocation on. Mutually exclusive with clone_ids.
- parameters (dict | None): Parameters passed to the invocation.
- intent_config (MlInvocationIntentConfig | None): Optional configuration for the invocation's output intents.
Returns:
output_files (list[File]): The list of output files generated by the File Intent of the ML model.
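Finally, a sketch of an invocation, reusing the endpoint_id from the deployment sketch. The my_clone_ids variable and the parameter value are hypothetical; clone_ids and collection_ids must not be combined, and wait() returns an empty list when the model produces no File Intent output.

execution = ml_api.invoke_endpoint(
    endpoint_id=endpoint_id,
    clone_ids=my_clone_ids,           # hypothetical list[CloneId]; or collection_ids=..., never both
    parameters={"threshold": 0.5},    # illustrative invocation parameter
)
output_files = execution.wait()       # [] when there is no File Intent output
for file in output_files:
    print(file)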