Skip to content

Commit

Permalink
[HWORKS-1048] Support multiple modularized project environments (#240)
Browse files Browse the repository at this point in the history
  • Loading branch information
robzor92 authored Jul 5, 2024
1 parent 954a07b commit 5d6bca9
Show file tree
Hide file tree
Showing 10 changed files with 70 additions and 11 deletions.
2 changes: 1 addition & 1 deletion python/hsml/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ class MODEL:


class MODEL_REGISTRY:
HOPSFS_MOUNT_PREFIX = "/home/yarnapp/hopsfs/"
HOPSFS_MOUNT_PREFIX = "/hopsfs/"


class MODEL_SERVING:
Expand Down
9 changes: 9 additions & 0 deletions python/hsml/deployment.py
Original file line number Diff line number Diff line change
Expand Up @@ -461,6 +461,15 @@ def api_protocol(self):
def api_protocol(self, api_protocol: str):
self._predictor.api_protocol = api_protocol

@property
def environment(self):
    """Name of the inference environment used by this deployment.

    Delegates to the wrapped predictor, which owns the environment
    setting (see `Predictor.environment`). May be `None`, in which case
    the serving backend falls back to its default environment.
    """
    return self._predictor.environment

@environment.setter
def environment(self, environment: str):
    # Delegate to the underlying predictor, which is the single owner of
    # the environment value; the deployment itself stores no copy.
    self._predictor.environment = environment

def __repr__(self):
desc = (
f", description: {self._description!r}"
Expand Down
9 changes: 2 additions & 7 deletions python/hsml/engine/model_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
# limitations under the License.
#

import importlib
import json
import os
import tempfile
Expand All @@ -24,7 +23,7 @@
from hsml import client, constants, util
from hsml.client.exceptions import ModelRegistryException, RestAPIError
from hsml.core import dataset_api, model_api
from hsml.engine import hopsworks_engine, local_engine
from hsml.engine import local_engine
from tqdm.auto import tqdm


Expand All @@ -33,11 +32,7 @@ def __init__(self):
self._model_api = model_api.ModelApi()
self._dataset_api = dataset_api.DatasetApi()

pydoop_spec = importlib.util.find_spec("pydoop")
if pydoop_spec is None:
self._engine = local_engine.LocalEngine()
else:
self._engine = hopsworks_engine.HopsworksEngine()
self._engine = local_engine.LocalEngine()

def _poll_model_available(self, model_instance, await_registration):
if await_registration > 0:
Expand Down
3 changes: 3 additions & 0 deletions python/hsml/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,7 @@ def deploy(
inference_batcher: Optional[Union[InferenceBatcher, dict]] = None,
transformer: Optional[Union[Transformer, dict]] = None,
api_protocol: Optional[str] = IE.API_PROTOCOL_REST,
environment: Optional[str] = None,
):
"""Deploy the model.
Expand Down Expand Up @@ -203,6 +204,7 @@ def deploy(
inference_batcher: Inference batcher configuration.
transformer: Transformer to be deployed together with the predictor.
api_protocol: API protocol to be enabled in the deployment (i.e., 'REST' or 'GRPC'). Defaults to 'REST'.
environment: The inference environment to use.
# Returns
`Deployment`: The deployment metadata object of a new or existing deployment.
Expand All @@ -223,6 +225,7 @@ def deploy(
inference_batcher=inference_batcher,
transformer=transformer,
api_protocol=api_protocol,
environment=environment,
)

return predictor.deploy()
Expand Down
5 changes: 3 additions & 2 deletions python/hsml/model_serving.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,7 @@ def postprocess(self, outputs):

return Transformer(script_file=script_file, resources=resources)

def create_deployment(self, predictor: Predictor, name: Optional[str] = None):
def create_deployment(self, predictor: Predictor, name: Optional[str] = None, environment: Optional[str] = None):
"""Create a Deployment metadata object.
!!! example
Expand Down Expand Up @@ -348,12 +348,13 @@ def create_deployment(self, predictor: Predictor, name: Optional[str] = None):
# Arguments
predictor: predictor to be used in the deployment
name: name of the deployment
environment: The inference environment to use
# Returns
`Deployment`. The model metadata object.
"""

return Deployment(predictor=predictor, name=name)
return Deployment(predictor=predictor, name=name, environment=environment)

@property
def project_name(self):
Expand Down
16 changes: 16 additions & 0 deletions python/hsml/predictor.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ def __init__(
created_at: Optional[str] = None,
creator: Optional[str] = None,
api_protocol: Optional[str] = INFERENCE_ENDPOINTS.API_PROTOCOL_REST,
environment: Optional[str] = None,
**kwargs,
):
serving_tool = (
Expand Down Expand Up @@ -91,6 +92,7 @@ def __init__(
self._transformer = util.get_obj_from_json(transformer, Transformer)
self._validate_script_file(self._model_framework, self._script_file)
self._api_protocol = api_protocol
self._environment = environment

def deploy(self):
"""Create a deployment for this predictor and persists it in the Model Serving.
Expand Down Expand Up @@ -268,6 +270,9 @@ def extract_fields_from_json(cls, json_decamelized):
kwargs["created_at"] = json_decamelized.pop("created")
kwargs["creator"] = json_decamelized.pop("creator")
kwargs["api_protocol"] = json_decamelized.pop("api_protocol")
if "environment_dto" in json_decamelized:
environment = json_decamelized.pop("environment_dto")
kwargs["environment"] = environment["name"]
return kwargs

def update_from_response_json(self, json_dict):
Expand Down Expand Up @@ -296,6 +301,8 @@ def to_dict(self):
"predictor": self._script_file,
"apiProtocol": self._api_protocol,
}
if self.environment is not None:
json = {**json, **{"environmentDTO": {"name": self._environment}}}
if self._resources is not None:
json = {**json, **self._resources.to_dict()}
if self._inference_logger is not None:
Expand Down Expand Up @@ -457,6 +464,15 @@ def api_protocol(self):
def api_protocol(self, api_protocol):
self._api_protocol = api_protocol

@property
def environment(self):
    """Name of the inference environment for this predictor.

    Populated from the backend `environmentDTO.name` field when parsing a
    response, and serialized back under the same key in `to_dict()`.
    `None` means no explicit environment was configured.
    """
    return self._environment

@environment.setter
def environment(self, environment: str):
    # Stored as-is; only serialized into the request payload when not None.
    self._environment = environment

def __repr__(self):
desc = (
f", description: {self._description!r}"
Expand Down
27 changes: 27 additions & 0 deletions python/tests/fixtures/predictor_fixtures.json
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,9 @@
"inference_logging": "ALL",
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "misc-inference-pipeline"
}
}
]
Expand Down Expand Up @@ -92,6 +95,9 @@
"inference_logging": "ALL",
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "misc-inference-pipeline"
}
},
{
Expand Down Expand Up @@ -131,6 +137,9 @@
"inference_logging": "ALL",
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "misc-inference-pipeline"
}
}
]
Expand Down Expand Up @@ -160,6 +169,9 @@
"inference_logging": "ALL",
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "tensorflow-inference-pipeline"
}
}
},
Expand Down Expand Up @@ -200,6 +212,9 @@
"inference_logging": "ALL",
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "tensorflow-inference-pipeline"
}
}
},
Expand Down Expand Up @@ -235,6 +250,9 @@
"inference_logging": "ALL",
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "misc-inference-pipeline"
}
}
},
Expand Down Expand Up @@ -277,6 +295,9 @@
},
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "misc-inference-pipeline"
}
}
},
Expand Down Expand Up @@ -312,6 +333,9 @@
},
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "misc-inference-pipeline"
}
}
},
Expand Down Expand Up @@ -354,6 +378,9 @@
},
"kafka_topic_dto": {
"name": "topic"
},
"environment_dto": {
"name": "misc-inference-pipeline"
}
}
},
Expand Down
2 changes: 1 addition & 1 deletion python/tests/test_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def test_model_framework_constants(self):

def test_model_registry_constants(self):
# Arrange
hopsfs_mount_prefix = {"HOPSFS_MOUNT_PREFIX": "/home/yarnapp/hopsfs/"}
hopsfs_mount_prefix = {"HOPSFS_MOUNT_PREFIX": "/hopsfs/"}

# Assert
self._check_added_modified_or_removed_values(
Expand Down
2 changes: 2 additions & 0 deletions python/tests/test_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ def test_deploy(self, mocker, backend_fixtures):
inference_batcher=inference_batcher,
transformer=transformer,
api_protocol=p_json["api_protocol"],
environment=p_json["environment_dto"]["name"],
)

# Assert
Expand All @@ -218,6 +219,7 @@ def test_deploy(self, mocker, backend_fixtures):
inference_batcher=inference_batcher,
transformer=transformer,
api_protocol=p_json["api_protocol"],
environment=p_json["environment_dto"]["name"],
)
mock_predictor.deploy.assert_called_once()

Expand Down
6 changes: 6 additions & 0 deletions python/tests/test_predictor.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ def test_from_response_json_singleton(self, mocker, backend_fixtures):
assert p.serving_tool == p_json["serving_tool"]
assert p.api_protocol == p_json["api_protocol"]
assert p.artifact_version == p_json["artifact_version"]
assert p.environment == p_json["environment_dto"]["name"]
assert p.script_file == p_json["predictor"]
assert isinstance(p.resources, resources.PredictorResources)
assert isinstance(p.transformer, transformer.Transformer)
Expand Down Expand Up @@ -119,6 +120,7 @@ def test_from_response_json_list(self, mocker, backend_fixtures):
assert p.model_server == p_json["model_server"]
assert p.serving_tool == p_json["serving_tool"]
assert p.api_protocol == p_json["api_protocol"]
assert p.environment == p_json["environment_dto"]["name"]
assert p.artifact_version == p_json["artifact_version"]
assert p.script_file == p_json["predictor"]
assert isinstance(p.resources, resources.PredictorResources)
Expand Down Expand Up @@ -156,6 +158,7 @@ def test_from_response_json_single(self, mocker, backend_fixtures):
assert p.model_server == p_json["model_server"]
assert p.serving_tool == p_json["serving_tool"]
assert p.api_protocol == p_json["api_protocol"]
assert p.environment == p_json["environment_dto"]["name"]
assert p.artifact_version == p_json["artifact_version"]
assert p.script_file == p_json["predictor"]
assert isinstance(p.resources, resources.PredictorResources)
Expand Down Expand Up @@ -207,6 +210,7 @@ def test_constructor(self, mocker, backend_fixtures):
model_server=p_json["model_server"],
serving_tool=p_json["serving_tool"],
api_protocol=p_json["api_protocol"],
environment=p_json["environment_dto"]["name"],
artifact_version=p_json["artifact_version"],
script_file=p_json["predictor"],
resources=p_json["predictor_resources"],
Expand Down Expand Up @@ -234,6 +238,7 @@ def test_constructor(self, mocker, backend_fixtures):
assert p.model_server == p_json["model_server"]
assert p.serving_tool == p_json["serving_tool"]
assert p.api_protocol == p_json["api_protocol"]
assert p.environment == p_json["environment_dto"]["name"]
assert p.artifact_version == p_json["artifact_version"]
assert p.script_file == p_json["predictor"]
assert isinstance(p.resources, resources.PredictorResources)
Expand Down Expand Up @@ -645,6 +650,7 @@ def extract_fields_from_json(self, mocker, backend_fixtures):
p_json["batching_configuration"]["batching_enabled"]
)
assert kwargs["api_protocol"] == p_json["api_protocol"]
assert kwargs["environment"] == p_json["environment_dto"]["name"]
assert isinstance(kwargs["transformer"], transformer.Transformer)
assert kwargs["transformer"].script_file == p_json["transformer"]
assert isinstance(
Expand Down

0 comments on commit 5d6bca9

Please sign in to comment.