Skip to content

Commit

Permalink
chore: update Vertex Model artifact resolution to use versioned model…
Browse files Browse the repository at this point in the history
…s and reduce allowed experiment run name length (#1468)
  • Loading branch information
sasha-gitg authored Jun 29, 2022
1 parent d07715a commit f8aea02
Show file tree
Hide file tree
Showing 6 changed files with 145 additions and 6 deletions.
6 changes: 5 additions & 1 deletion google/cloud/aiplatform/metadata/artifact.py
Original file line number Diff line number Diff line change
Expand Up @@ -535,14 +535,18 @@ def create_vertex_resource_artifact(cls, resource: Union[models.Model]) -> Artif
"""
cls.validate_resource_supports_metadata(resource)
resource.wait()

metadata_type = cls._resource_to_artifact_type[type(resource)]
uri = rest_utils.make_gcp_resource_rest_url(resource=resource)

return Artifact.create(
schema_title=metadata_type,
display_name=getattr(resource.gca_resource, "display_name", None),
uri=uri,
metadata={"resourceName": resource.resource_name},
# Note that support for non-versioned resources requires
# change to reference `resource_name` please update if
# supporting resource other than Model
metadata={"resourceName": resource.versioned_resource_name},
project=resource.project,
location=resource.location,
credentials=resource.credentials,
Expand Down
3 changes: 3 additions & 0 deletions google/cloud/aiplatform/metadata/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,3 +65,6 @@
schema_version="0.0.1",
metadata={_VERTEX_EXPERIMENT_TRACKING_LABEL: True},
)

# Suffix appended to a run ID to form its TensorBoard run artifact ID.
_TB_RUN_ARTIFACT_POST_FIX_ID = "-tb-run"
# Maximum allowed length for a combined experiment/run ID: 128 minus
# room for the TensorBoard suffix, so the derived "-tb-run" artifact ID
# still fits within the 128-character resource-ID limit.
_EXPERIMENT_RUN_MAX_LENGTH = 128 - len(_TB_RUN_ARTIFACT_POST_FIX_ID)
6 changes: 3 additions & 3 deletions google/cloud/aiplatform/metadata/experiment_run_resource.py
Original file line number Diff line number Diff line change
Expand Up @@ -607,9 +607,9 @@ def _validate_run_id(run_id: str):
ValueError if run id is too long.
"""

if len(run_id) > 128:
if len(run_id) > constants._EXPERIMENT_RUN_MAX_LENGTH:
raise ValueError(
f"Length of Experiment ID and Run ID cannot be greater than 128. "
f"Length of Experiment ID and Run ID cannot be greater than {constants._EXPERIMENT_RUN_MAX_LENGTH}. "
f"{run_id} is of length {len(run_id)}"
)

Expand Down Expand Up @@ -822,7 +822,7 @@ def _tensorboard_run_id(run_id: str) -> str:
Returns:
Resource id for the associated tensorboard run artifact.
"""
return f"{run_id}-tb-run"
return f"{run_id}{constants._TB_RUN_ARTIFACT_POST_FIX_ID}"

@_v1_not_supported
def assign_backing_tensorboard(
Expand Down
5 changes: 4 additions & 1 deletion google/cloud/aiplatform/utils/rest_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,10 @@ def make_gcp_resource_rest_url(resource: base.VertexAiResourceNoun) -> str:
Returns:
The formatted url of resource.
"""
resource_name = resource.resource_name
try:
resource_name = resource.versioned_resource_name
except AttributeError:
resource_name = resource.resource_name
version = resource.api_client._default_version
api_uri = resource.api_client.api_endpoint

Expand Down
20 changes: 19 additions & 1 deletion tests/unit/aiplatform/test_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -993,7 +993,6 @@ def test_init_experiment_wrong_schema(self):
)

@pytest.mark.usefixtures("get_metadata_store_mock")
@pytest.mark.usefixtures()
def test_start_run(
self,
get_experiment_mock,
Expand Down Expand Up @@ -1025,6 +1024,25 @@ def test_start_run(
context=_EXPERIMENT_MOCK.name, child_contexts=[_EXPERIMENT_RUN_MOCK.name]
)

@pytest.mark.usefixtures("get_metadata_store_mock", "get_experiment_mock")
def test_start_run_fails_when_run_name_too_long(self):
    """Run names that push the combined ID past the limit raise ValueError.

    The experiment/run ID must leave room for the "-tb-run" suffix used
    for the TensorBoard run artifact, so the effective maximum is
    constants._EXPERIMENT_RUN_MAX_LENGTH rather than a flat 128.
    """
    aiplatform.init(
        project=_TEST_PROJECT,
        location=_TEST_LOCATION,
        experiment=_TEST_EXPERIMENT,
    )

    # "a" * n is the idiomatic (and O(n)) way to build a repeated string;
    # the previous join-over-generator form did the same work manually.
    # +2 pushes the combined experiment+run ID just over the maximum.
    run_name_too_long = "a" * (
        constants._EXPERIMENT_RUN_MAX_LENGTH + 2 - len(_TEST_EXPERIMENT)
    )

    with pytest.raises(ValueError):
        aiplatform.start_run(run_name_too_long)

@pytest.mark.usefixtures(
"get_metadata_store_mock",
"get_experiment_mock",
Expand Down
111 changes: 111 additions & 0 deletions tests/unit/aiplatform/test_metadata_resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from google.cloud.aiplatform.metadata import artifact
from google.cloud.aiplatform.metadata import context
from google.cloud.aiplatform.metadata import execution
from google.cloud.aiplatform.metadata import utils as metadata_utils
from google.cloud.aiplatform_v1 import (
MetadataServiceClient,
AddExecutionEventsResponse,
Expand All @@ -39,6 +40,8 @@
AddContextArtifactsAndExecutionsResponse,
)

import test_models

# project
_TEST_PROJECT = "test-project"
_TEST_LOCATION = "us-central1"
Expand Down Expand Up @@ -543,6 +546,34 @@ def test_add_executions_only(self, add_context_artifacts_and_executions_mock):
)


# Re-bound under a local name so tests in this module can request it via
# @pytest.mark.usefixtures("get_model_with_version_mock"); presumably a
# pytest fixture defined in test_models — confirm against that module.
get_model_with_version_mock = test_models.get_model_with_version
# REST URL for the versioned model resource; the artifact lookup in the
# tests below filters on this exact URI.
_VERTEX_MODEL_ARTIFACT_URI = f"https://{_TEST_LOCATION}-aiplatform.googleapis.com/v1/{test_models._TEST_MODEL_OBJ_WITH_VERSION.name}"


@pytest.fixture
def list_vertex_model_artifact_mock():
    """Patch MetadataServiceClient.list_artifacts to yield one Vertex Model artifact."""
    vertex_model_artifact = GapicArtifact(
        name=_TEST_ARTIFACT_NAME,
        uri=_VERTEX_MODEL_ARTIFACT_URI,
        display_name=_TEST_DISPLAY_NAME,
        schema_title=_TEST_SCHEMA_TITLE,
        schema_version=_TEST_SCHEMA_VERSION,
        description=_TEST_DESCRIPTION,
        metadata=_TEST_METADATA,
    )
    with patch.object(MetadataServiceClient, "list_artifacts") as mocked_list:
        mocked_list.return_value = [vertex_model_artifact]
        yield mocked_list


@pytest.fixture
def list_artifact_empty_mock():
    """Patch MetadataServiceClient.list_artifacts so lookups find nothing."""
    patcher = patch.object(MetadataServiceClient, "list_artifacts")
    mocked_list = patcher.start()
    mocked_list.return_value = []
    try:
        yield mocked_list
    finally:
        patcher.stop()


class TestExecution:
def setup_method(self):
reload(initializer)
Expand Down Expand Up @@ -680,6 +711,86 @@ def test_add_artifact(self, add_execution_events_mock):
events=[Event(artifact=_TEST_ARTIFACT_NAME, type_=Event.Type.OUTPUT)],
)

@pytest.mark.usefixtures("get_execution_mock", "get_model_with_version_mock")
def test_add_vertex_model(
    self, add_execution_events_mock, list_vertex_model_artifact_mock
):
    """Assigning a Model output resolves its existing artifact and links it."""
    aiplatform.init(project=_TEST_PROJECT, location=_TEST_LOCATION)

    run_execution = execution.Execution.get_or_create(
        resource_id=_TEST_EXECUTION_ID,
        schema_title=_TEST_SCHEMA_TITLE,
        display_name=_TEST_DISPLAY_NAME,
        schema_version=_TEST_SCHEMA_VERSION,
        description=_TEST_DESCRIPTION,
        metadata=_TEST_METADATA,
        metadata_store_id=_TEST_METADATA_STORE,
    )

    vertex_model = aiplatform.Model(test_models._TEST_MODEL_NAME)
    run_execution.assign_output_artifacts(artifacts=[vertex_model])

    # The artifact is looked up by schema title and versioned-resource URI.
    expected_filter = metadata_utils._make_filter_string(
        schema_title="google.VertexModel", uri=_VERTEX_MODEL_ARTIFACT_URI
    )
    list_vertex_model_artifact_mock.assert_called_once_with(
        request={
            "parent": "projects/test-project/locations/us-central1/metadataStores/default",
            "filter": expected_filter,
        }
    )

    # The resolved artifact is attached to the execution as an OUTPUT event.
    add_execution_events_mock.assert_called_once_with(
        execution=_TEST_EXECUTION_NAME,
        events=[Event(artifact=_TEST_ARTIFACT_NAME, type_=Event.Type.OUTPUT)],
    )

@pytest.mark.usefixtures("get_execution_mock", "get_model_with_version_mock")
def test_add_vertex_model_not_resolved(
    self, add_execution_events_mock, list_artifact_empty_mock, create_artifact_mock
):
    """When no artifact matches the Model, a new one is created before linking."""
    aiplatform.init(project=_TEST_PROJECT, location=_TEST_LOCATION)

    run_execution = execution.Execution.get_or_create(
        resource_id=_TEST_EXECUTION_ID,
        schema_title=_TEST_SCHEMA_TITLE,
        display_name=_TEST_DISPLAY_NAME,
        schema_version=_TEST_SCHEMA_VERSION,
        description=_TEST_DESCRIPTION,
        metadata=_TEST_METADATA,
        metadata_store_id=_TEST_METADATA_STORE,
    )

    vertex_model = aiplatform.Model(test_models._TEST_MODEL_NAME)
    run_execution.assign_output_artifacts(artifacts=[vertex_model])

    # The lookup by schema title and versioned URI returns no results...
    list_artifact_empty_mock.assert_called_once_with(
        request={
            "parent": "projects/test-project/locations/us-central1/metadataStores/default",
            "filter": metadata_utils._make_filter_string(
                schema_title="google.VertexModel", uri=_VERTEX_MODEL_ARTIFACT_URI
            ),
        }
    )

    # ...so a fresh google.VertexModel artifact is created, carrying the
    # versioned resource name in its metadata.
    expected_artifact = GapicArtifact(
        schema_title="google.VertexModel",
        display_name=test_models._TEST_MODEL_OBJ_WITH_VERSION.display_name,
        uri=_VERTEX_MODEL_ARTIFACT_URI,
        metadata={"resourceName": test_models._TEST_MODEL_OBJ_WITH_VERSION.name},
        state=GapicArtifact.State.LIVE,
    )
    create_artifact_mock.assert_called_once_with(
        parent="projects/test-project/locations/us-central1/metadataStores/default",
        artifact=expected_artifact,
        artifact_id=None,
    )

    # The new artifact is attached to the execution as an OUTPUT event.
    add_execution_events_mock.assert_called_once_with(
        execution=_TEST_EXECUTION_NAME,
        events=[Event(artifact=_TEST_ARTIFACT_NAME, type_=Event.Type.OUTPUT)],
    )

@pytest.mark.usefixtures("get_execution_mock")
def test_query_input_and_output_artifacts(
self, query_execution_inputs_and_outputs_mock
Expand Down

0 comments on commit f8aea02

Please sign in to comment.