@@ -255,7 +255,8 @@ deploymentSpec:
255255 \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\" 3.9\" ' &&\
256256 \ python3 -m pip install --quiet --no-warn-script-location 'google-cloud-aiplatform>=1.59.0'\
257257 \ 'google-cloud-run>=0.10.0' 'google-cloud-storage>=2.10.0' 'requests>=2.31.0'\
258- \ 'joblib>=1.4.2' && \" $0\" \" $@\"\n "
258+ \ 'joblib>=1.4.2' 'scikit-learn>=1.3.0' 'pandas>=2.0.0' 'numpy>=1.24.0'\
259+ \ && \" $0\" \" $@\"\n "
259260 - sh
260261 - -ec
261262 - ' program_path=$(mktemp -d)
@@ -270,30 +271,46 @@ deploymentSpec:
270271 \ *\n\n def deploy_blessed_model_to_fastapi(\n project_id: str,\n location:\
271272 \ str,\n model_name: str,\n service_name: str,\n service_endpoint:\
272273 \ Output[Artifact]\n ):\n from google.cloud import aiplatform, aiplatform_v1,\
273- \ run_v2, storage\n import joblib\n import tempfile\n import os\n \
274- \ import requests\n import time\n\n print(f\" Starting FastAPI deployment\
275- \ for blessed model: {model_name}\" )\n print(f\" Service name: {service_name}\" \
276- )\n\n # 1. Initialize Vertex AI and find blessed model\n aiplatform.init(project=project_id,\
277- \ location=location)\n\n client = aiplatform_v1.ModelServiceClient(\n \
278- \ client_options={\" api_endpoint\" : f\" {location}-aiplatform.googleapis.com\" \
279- }\n )\n request = {\n \" parent\" : f\" projects/{project_id}/locations/{location}\" \
280- ,\n \" filter\" : f\" display_name={model_name}\"\n }\n\n models\
281- \ = list(client.list_models(request=request))\n blessed_model = None\n \
282- \n print(f\" Found {len(models)} models with name {model_name}\" )\n\n \
283- \ for model in models:\n print(f\" Model: {model.name}, Aliases:\
284- \ {list(model.version_aliases)}\" )\n if \" blessed\" in model.version_aliases:\n \
285- \ blessed_model = model\n break\n\n if not blessed_model:\n \
286- \ raise ValueError(f\" No blessed version found for model {model_name}.\
287- \ Available models: {[(m.name, list(m.version_aliases)) for m in models]}\" \
288- )\n\n print(f\" Found blessed model: {blessed_model.name}\" )\n print(f\" \
289- Model URI: {blessed_model.artifact_uri}\" )\n\n # 2. Download joblib model\
290- \ from blessed version\n gcs_uri = blessed_model.artifact_uri\n if\
291- \ not gcs_uri.startswith('gs://'):\n raise ValueError(f\" Expected\
292- \ GCS URI, got: {gcs_uri}\" )\n\n bucket_name = gcs_uri.replace('gs://',\
293- \ '').split('/')[0]\n model_path = '/'.join(gcs_uri.replace('gs://',\
294- \ '').split('/')[1:])\n\n print(f\" Downloading model from gs://{bucket_name}/{model_path}\" \
295- )\n\n storage_client = storage.Client()\n bucket = storage_client.bucket(bucket_name)\n \
296- \n # Download and validate the model\n model_blob_path = f\" {model_path}/model.joblib\" \
274+ \ run_v2, storage\n from google.auth import default\n import joblib\n \
275+ \ import tempfile\n import os\n import requests\n import time\n \
276+ \n print(f\" Starting FastAPI deployment for blessed model: {model_name}\" \
277+ )\n print(f\" Service name: {service_name}\" )\n\n # 1. Initialize Vertex\
278+ \ AI and get credentials\n aiplatform.init(project=project_id, location=location)\n \
279+ \n # Get default credentials\n credentials, _ = default()\n print(credentials)\n \
280+ \n # Create client with explicit credentials\n client = aiplatform_v1.ModelServiceClient(\n \
281+ \ credentials=credentials,\n client_options={\" api_endpoint\" \
282+ : f\" {location}-aiplatform.googleapis.com\" }\n )\n\n print(f\" Searching\
283+ \ for blessed model with name: {model_name}\" )\n\n # Use the high-level\
284+ \ aiplatform library to list all model versions\n # models = aiplatform.Model.list(filter=f\" \
285+ display_name={model_name}\" )\n # blessed_model = None\n\n request\
286+ \ = {\n \" parent\" : f\" projects/{project_id}/locations/{location}\" \
287+ ,\n \" filter\" : f\" display_name={model_name}\"\n }\n\n \
288+ \ models = list(client.list_models(request=request))\n blessed_model\
289+ \ = None\n\n print(f\" Found {len(models)} model versions with name {model_name}\" \
290+ )\n\n # Search through all model versions (each item in models is already\
291+ \ a version)\n for parent_model in models:\n print(f\" Checking\
292+ \ parent model: {parent_model.name}\" )\n\n # List all versions of\
293+ \ this model\n versions_request = {\" name\" : parent_model.name}\n \
294+ \ versions = list(client.list_model_versions(request=versions_request))\n \
295+ \n print(f\" Found {len(versions)} versions for this model\" )\n\n \
296+ \ for version in versions:\n print(f\" Version {version.version_id}:\
297+ \ Aliases = {list(version.version_aliases)}\" )\n if \" blessed\" \
298+ \ in version.version_aliases:\n blessed_model = version\n \
299+ \ print(f\" Found blessed version: {version.version_id}\" \
300+ )\n break\n\n if blessed_model:\n break\n \
301+ \n if not blessed_model:\n available_versions = [(m.resource_name,\
302+ \ m.version_id, list(m.version_aliases)) for m in models]\n raise\
303+ \ ValueError(f\" No blessed version found for model {model_name}. Available\
304+ \ versions: {available_versions}\" )\n\n print(f\" Found blessed model:\
305+ \ {blessed_model.name}\" )\n print(f\" Model URI: {blessed_model.artifact_uri}\" \
306+ )\n\n # 2. Download joblib model from blessed version\n gcs_uri =\
307+ \ blessed_model.artifact_uri\n if not gcs_uri.startswith('gs://'):\n \
308+ \ raise ValueError(f\" Expected GCS URI, got: {gcs_uri}\" )\n\n \
309+ \ bucket_name = gcs_uri.replace('gs://', '').split('/')[0]\n model_path\
310+ \ = '/'.join(gcs_uri.replace('gs://', '').split('/')[1:])\n\n print(f\" \
311+ Downloading model from gs://{bucket_name}/{model_path}\" )\n\n storage_client\
312+ \ = storage.Client()\n bucket = storage_client.bucket(bucket_name)\n\n \
313+ \ # Download and validate the model\n model_blob_path = f\" {model_path}/model.joblib\" \
297314 \n blob = bucket.blob(model_blob_path)\n\n if not blob.exists():\n \
298315 \ raise ValueError(f\" Model file not found at gs://{bucket_name}/{model_blob_path}\" \
299316 )\n\n with tempfile.NamedTemporaryFile(suffix='.joblib', delete=False)\