diff --git a/cm-mlops/script/app-loadgen-generic-python/_cm.yaml b/cm-mlops/script/app-loadgen-generic-python/_cm.yaml
index a7b1c2c49..913814e3b 100644
--- a/cm-mlops/script/app-loadgen-generic-python/_cm.yaml
+++ b/cm-mlops/script/app-loadgen-generic-python/_cm.yaml
@@ -30,6 +30,7 @@ default_env:
 input_mapping:
   modelpath: CM_ML_MODEL_FILE_WITH_PATH
   modelcodepath: CM_ML_MODEL_CODE_WITH_PATH
+  modelcfgpath: CM_ML_MODEL_CFG_WITH_PATH
   modelsamplepath: CM_ML_MODEL_SAMPLE_WITH_PATH
   output_dir: CM_MLPERF_OUTPUT_DIR
   scenario: CM_MLPERF_LOADGEN_SCENARIO
@@ -255,9 +256,11 @@ input_description:
   modelpath:
     desc: Full path to file with model weights
   modelcodepath:
-    desc: (for python models) Full path to file with model code
+    desc: (for PyTorch models) Full path to file with model code and cm.py
+  modelcfgpath:
+    desc: (for PyTorch models) Full path to JSON file with model cfg
   modelsamplepath:
-    desc: (for python models) Full path to file with model sample in pickle format
+    desc: (for PyTorch models) Full path to file with model sample in pickle format
   ep:
     desc: ONNX Execution provider
   scenario:
diff --git a/cm-mlops/script/app-loadgen-generic-python/customize.py b/cm-mlops/script/app-loadgen-generic-python/customize.py
index d4180facf..5ac7b57cb 100644
--- a/cm-mlops/script/app-loadgen-generic-python/customize.py
+++ b/cm-mlops/script/app-loadgen-generic-python/customize.py
@@ -1,3 +1,5 @@
+# Developer: Grigori Fursin
+
 from cmind import utils
 import os
 import shutil
@@ -49,6 +51,9 @@ def preprocess(i):
     if env.get('CM_ML_MODEL_CODE_WITH_PATH', '') != '':
         run_opts +=" --model_code "+env['CM_ML_MODEL_CODE_WITH_PATH']
 
+    if env.get('CM_ML_MODEL_CFG_WITH_PATH', '') != '':
+        run_opts +=" --model_cfg "+env['CM_ML_MODEL_CFG_WITH_PATH']
+
     if env.get('CM_ML_MODEL_SAMPLE_WITH_PATH', '') != '':
         run_opts +=" --model_sample_pickle "+env['CM_ML_MODEL_SAMPLE_WITH_PATH']
 
diff --git a/cm-mlops/script/app-loadgen-generic-python/src/backend_onnxruntime.py b/cm-mlops/script/app-loadgen-generic-python/src/backend_onnxruntime.py
index bd52fba94..e95e467b9 100644
--- a/cm-mlops/script/app-loadgen-generic-python/src/backend_onnxruntime.py
+++ b/cm-mlops/script/app-loadgen-generic-python/src/backend_onnxruntime.py
@@ -44,6 +44,7 @@ def __init__(
         intra_op_threads=0,
         inter_op_threads=0,
         model_code='', # Not used here
+        model_cfg={}, # Not used here
         model_sample_pickle='' # Not used here
     ):
         self.model_path = model_path
diff --git a/cm-mlops/script/app-loadgen-generic-python/src/backend_pytorch.py b/cm-mlops/script/app-loadgen-generic-python/src/backend_pytorch.py
index 839948394..618fbfc9b 100644
--- a/cm-mlops/script/app-loadgen-generic-python/src/backend_pytorch.py
+++ b/cm-mlops/script/app-loadgen-generic-python/src/backend_pytorch.py
@@ -1,3 +1,5 @@
+# Developer: Grigori Fursin
+
 import typing
 import importlib
 import os
@@ -42,11 +44,13 @@ def __init__(
         intra_op_threads=0,
         inter_op_threads=0,
         model_code='',
+        model_cfg={},
         model_sample_pickle=''
     ):
 
         self.model_path = model_path
         self.model_code = model_code
+        self.model_cfg = model_cfg
         self.model_sample_pickle = model_sample_pickle
         self.execution_provider = execution_provider
 
@@ -80,7 +84,10 @@ def create(self) -> Model:
             del(sys.path[0])
 
         # Init model
-        model = model_module.model_init(checkpoint, 'model_state_dict')
+        if len(self.model_cfg)>0:
+            print ('Model cfg: {}'.format(self.model_cfg))
+
+        model = model_module.model_init(checkpoint, self.model_cfg)
 
         model.eval()
 
         return XModel(model)
diff --git a/cm-mlops/script/app-loadgen-generic-python/src/main.py b/cm-mlops/script/app-loadgen-generic-python/src/main.py
index 2c30a16dd..f291b1446 100644
--- a/cm-mlops/script/app-loadgen-generic-python/src/main.py
+++ b/cm-mlops/script/app-loadgen-generic-python/src/main.py
@@ -24,6 +24,7 @@ def main(
     backend: str,
     model_path: str,
     model_code: str,
+    model_cfg: str,
     model_sample_pickle: str,
     output_path: typing.Optional[str],
     runner_name: str,
@@ -48,6 +49,14 @@ def main(
     else:
         raise Exception("Error: backend is not recognized.")
 
+    # Load model cfg
+    model_cfg_dict = {}
+    if model_cfg!='':
+        import json
+
+        with open(model_cfg) as mc:
+            model_cfg_dict = json.load(mc)
+
     model_factory = XModelFactory(
         model_path,
         execution_provider,
@@ -55,6 +64,7 @@ def main(
         interop_threads,
         intraop_threads,
         model_code,
+        model_cfg_dict,
         model_sample_pickle
     )
 
@@ -205,6 +215,7 @@ def main(
     parser.add_argument("--loadgen_expected_qps", help="Expected QPS", default=1, type=float)
     parser.add_argument("--loadgen_duration_sec", help="Expected duration in sec.", default=1, type=float)
     parser.add_argument("--model_code", help="(for PyTorch models) path to model code with cm.py", default="")
+    parser.add_argument("--model_cfg", help="(for PyTorch models) path to model's configuration in JSON file", default="")
     parser.add_argument("--model_sample_pickle", help="(for PyTorch models) path to a model sample in pickle format", default="")
 
     args = parser.parse_args()
@@ -212,6 +223,7 @@ def main(
         args.backend,
         args.model_path,
         args.model_code,
+        args.model_cfg,
         args.model_sample_pickle,
         args.output,
         args.runner,
diff --git a/cm-mlops/script/app-loadgen-generic-python/src/utils.py b/cm-mlops/script/app-loadgen-generic-python/src/utils.py
index da6dc8540..8c182650c 100644
--- a/cm-mlops/script/app-loadgen-generic-python/src/utils.py
+++ b/cm-mlops/script/app-loadgen-generic-python/src/utils.py
@@ -1,3 +1,5 @@
+# Developer: Grigori Fursin
+
 import os
 import psutil
 
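Usage note (illustrative, not part of the patch): with this change, main.py parses the JSON file passed via --model_cfg into a Python dict and backend_pytorch.py forwards that dict as the second argument of model_init() in the user-supplied cm.py (an empty dict when --model_cfg is not given). A minimal sketch of a matching cm.py, assuming a hypothetical MyNet module and hypothetical cfg keys "num_classes" and "checkpoint_key":

    # cm.py -- user-supplied model code loaded by backend_pytorch.py
    # Sketch only: MyNet and the cfg keys below are illustrative assumptions.
    import torch

    class MyNet(torch.nn.Module):
        def __init__(self, num_classes=1000):
            super().__init__()
            self.fc = torch.nn.Linear(3 * 224 * 224, num_classes)

        def forward(self, x):
            return self.fc(x.flatten(1))

    def model_init(checkpoint, cfg):
        # cfg is the dict parsed from the JSON file given via --model_cfg,
        # e.g. {"num_classes": 1000, "checkpoint_key": "model_state_dict"},
        # or {} when no cfg file is provided.
        model = MyNet(num_classes=cfg.get('num_classes', 1000))
        model.load_state_dict(checkpoint[cfg.get('checkpoint_key', 'model_state_dict')])
        return model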