diff --git a/azuremlpythonsdk-v2/azml_03_realtime_inference.py b/azuremlpythonsdk-v2/azml_03_realtime_inference.py
index 6fa24de..cab4430 100644
--- a/azuremlpythonsdk-v2/azml_03_realtime_inference.py
+++ b/azuremlpythonsdk-v2/azml_03_realtime_inference.py
@@ -14,26 +14,28 @@ online_endpoint_name = ("srv-" + AZURE_WORKSPACE_NAME).lower()
 
 def main():
     # 1. Create or Load a ML client
-    ml_client = XXXX()
+    ml_client = create_or_load_ml_client()
 
     # 2. Create a endpoint
     print(f"Creating endpoint {online_endpoint_name}")
-    endpoint = XXXX(
+    endpoint = ManagedOnlineEndpoint(
         name=online_endpoint_name,
         auth_mode="key",
     )
 
     # Method `result()` should be added to wait until completion
-    ml_client.online_endpoints.XXXX(endpoint).result()
+    ml_client.online_endpoints.begin_create_or_update(endpoint).result()
 
     # 3. Create a deployment
-    best_model_latest_version = XXXX
+    best_model_latest_version = max(
+        [int(m.version) for m in ml_client.models.list(name=best_model_name)]
+    )
 
-    blue_deployment = XXXX(
+    blue_deployment = ManagedOnlineDeployment(
         name=online_endpoint_name,
         endpoint_name=online_endpoint_name,
         # @latest doesn't work with model paths
-        model=XXXX,
+        model=f"azureml:{best_model_name}:{best_model_latest_version}",
         instance_type=VM_SIZE,
         instance_count=1,
     )
diff --git a/azuremlpythonsdk-v2/azml_04_test_inference.py b/azuremlpythonsdk-v2/azml_04_test_inference.py
index a45581f..a76d9b7 100644
--- a/azuremlpythonsdk-v2/azml_04_test_inference.py
+++ b/azuremlpythonsdk-v2/azml_04_test_inference.py
@@ -7,11 +7,11 @@ from ml_client import create_or_load_ml_client
 
 def main():
     # 1. Load a Workspace
-    ml_client = XXXX()
+    ml_client = create_or_load_ml_client()
 
     # 2. Get predictions
-    output = ml_client.online_endpoints.XXXX(
-        endpoint_name=XXXX,
+    output = ml_client.online_endpoints.invoke(
+        endpoint_name=online_endpoint_name,
         deployment_name=online_endpoint_name,
         request_file="./diabetes_test_inference/request.json",
     )
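
For reference, a minimal sketch of the import block that the filled-in names above presume. The module headers sit outside both hunks, so the exact import lines are an assumption: `ManagedOnlineEndpoint` and `ManagedOnlineDeployment` come from `azure.ai.ml.entities` in SDK v2, and `create_or_load_ml_client` is the local helper that azml_04's hunk context already imports from `ml_client`.

    # Assumed imports for azml_03_realtime_inference.py (not shown in the hunk above)
    from azure.ai.ml.entities import ManagedOnlineEndpoint, ManagedOnlineDeployment
    from ml_client import create_or_load_ml_client  # local helper, also imported in azml_04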