Add Analyze Subcommand (#186)
* Adding CLI options for analyze along with the subcommand. Updates to underlying classes to support using the CLI.

* Fixing CodeQL issues

* Actually raise the exception

* Update help comment

* Refactoring subcommands

* Fixing CodeQL issues and other small changes from PR review

* Refactoring run method to be common between profile and analyze subcommands

* Fixing CodeQL issues

* Fixing CodeQL issues
nv-braf authored Nov 22, 2024
1 parent 3f92904 commit 2a9e864
Showing 21 changed files with 820 additions and 298 deletions.
36 changes: 35 additions & 1 deletion genai-perf/genai_perf/config/generate/genai_perf_config.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from argparse import Namespace
from copy import deepcopy
from dataclasses import dataclass

@@ -22,7 +23,7 @@
ConfigOutputTokens,
ConfigSyntheticTokens,
)
from genai_perf.types import CheckpointObject, ModelObjectiveParameters
from genai_perf.types import CheckpointObject, ModelObjectiveParameters, Parameters


@dataclass
@@ -37,7 +38,9 @@ def __init__(
self,
config: ConfigCommand,
model_objective_parameters: ModelObjectiveParameters,
args: Namespace = Namespace(),
):
self._args = deepcopy(args)
self._set_options_based_on_config(config)
self._set_options_based_on_objective(model_objective_parameters)

@@ -51,14 +54,40 @@ def _set_options_based_on_config(self, config: ConfigCommand) -> None:
def _set_options_based_on_objective(
self, model_objective_parameters: ModelObjectiveParameters
) -> None:
self._parameters: Parameters = {}
for objective in model_objective_parameters.values():
for name, parameter in objective.items():
if parameter.usage == SearchUsage.RUNTIME_GAP:
self._parameters[name] = parameter.get_value_based_on_category()
if hasattr(self.input, name):
self.input.__setattr__(
name, parameter.get_value_based_on_category()
)

###########################################################################
# Get Accessor Methods
###########################################################################
def get_parameters(self) -> Parameters:
"""
Returns a dictionary of parameters and their values
"""
return self._parameters

def get_obj_args(self) -> Namespace:
"""
Returns args that can be used by the existing CLI based methods in GAP
These will include any objectives that are set via parameters
"""
obj_args = deepcopy(self._args)
if "input_sequence_length" in self._parameters:
obj_args.synthetic_input_tokens_mean = self._parameters[
"input_sequence_length"
]
if "num_prompts" in self._parameters:
obj_args.num_prompts = self._parameters["num_prompts"]

return obj_args

###########################################################################
# Checkpoint Methods
###########################################################################
@@ -69,6 +98,9 @@ def create_checkpoint_object(self) -> CheckpointObject:
"""
genai_perf_config_dict = deepcopy(self.__dict__)

# Values set on the CLI are not kept (they can vary from run to run)
del genai_perf_config_dict["_args"]

return genai_perf_config_dict

@classmethod
@@ -83,6 +115,8 @@ def create_class_from_checkpoint(
config=ConfigCommand([""]),
model_objective_parameters={},
)
genai_perf_config._parameters = genai_perf_config_dict["_parameters"]

genai_perf_config.input = ConfigInput(**genai_perf_config_dict["input"])
genai_perf_config.input.synthetic_tokens = ConfigSyntheticTokens(
**genai_perf_config_dict["input"]["synthetic_tokens"]
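For orientation: the new get_obj_args() accessor above copies the CLI Namespace captured at construction time and overlays any RUNTIME_GAP objective values, so the existing CLI-driven code paths in GAP can run unchanged. A minimal standalone sketch of that overlay follows; the helper name build_objective_args and the example values are illustrative, not part of the codebase.

# Standalone sketch of the parameter-to-Namespace overlay performed by
# get_obj_args(). Only "input_sequence_length" and "num_prompts" come from the
# diff; the function name and example values are hypothetical.
from argparse import Namespace
from copy import deepcopy
from typing import Any, Dict

def build_objective_args(cli_args: Namespace, parameters: Dict[str, Any]) -> Namespace:
    """Copy the CLI args and overlay objective-driven values onto them."""
    obj_args = deepcopy(cli_args)  # never mutate the Namespace parsed from the CLI
    if "input_sequence_length" in parameters:
        # The objective maps onto the destination of the existing CLI option.
        obj_args.synthetic_input_tokens_mean = parameters["input_sequence_length"]
    if "num_prompts" in parameters:
        obj_args.num_prompts = parameters["num_prompts"]
    return obj_args

args = Namespace(synthetic_input_tokens_mean=550, num_prompts=100)
print(build_objective_args(args, {"input_sequence_length": 1024}))
# -> Namespace(num_prompts=100, synthetic_input_tokens_mean=1024)
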
55 changes: 40 additions & 15 deletions genai-perf/genai_perf/config/generate/perf_analyzer_config.py
@@ -24,7 +24,12 @@
from genai_perf.exceptions import GenAIPerfException
from genai_perf.inputs.input_constants import DEFAULT_INPUT_DATA_JSON
from genai_perf.logging import logging
from genai_perf.types import CheckpointObject, ModelName, ModelObjectiveParameters
from genai_perf.types import (
CheckpointObject,
ModelName,
ModelObjectiveParameters,
Parameters,
)
from genai_perf.utils import convert_option_name
from genai_perf.wrapper import Profiler

@@ -68,6 +73,12 @@
# required for decoupled models into triton).
"streaming",
"subcommand",
"sweep_list",
"sweep_type",
"sweep_range",
"sweep_min",
"sweep_max",
"sweep_step",
"synthetic_input_files",
"synthetic_input_tokens_mean",
"synthetic_input_tokens_stddev",
@@ -97,7 +108,7 @@ def __init__(
self._set_options_based_on_cli(args, extra_args)
self._set_options_based_on_config(config)
self._set_options_based_on_objective(model_objective_parameters)
self._set_artifact_paths()
self._set_artifact_paths(model_objective_parameters)

###########################################################################
# Set Options Methods
@@ -123,21 +134,25 @@ def _set_options_based_on_config(self, config: ConfigCommand) -> None:
def _set_options_based_on_objective(
self, model_objective_parameters: ModelObjectiveParameters
) -> None:
self._parameters: Dict[str, Any] = {}
self._parameters: Parameters = {}
for objective in model_objective_parameters.values():
for name, parameter in objective.items():
if parameter.usage == SearchUsage.RUNTIME_PA:
self._parameters[name] = parameter.get_value_based_on_category()

def _set_artifact_paths(self) -> None:
def _set_artifact_paths(
self, model_objective_parameters: ModelObjectiveParameters
) -> None:
# When restoring from a checkpoint there won't be any args
if not hasattr(self._args, "subcommand"):
return

if self._args.artifact_dir == Path(DEFAULT_ARTIFACT_DIR):
artifact_name = self._get_artifact_model_name()
artifact_name += self._get_artifact_service_kind()
artifact_name += self._get_artifact_stimulus_type()
artifact_name += self._get_artifact_stimulus_type(
model_objective_parameters
)

self._args.artifact_dir = self._args.artifact_dir / Path(
"-".join(artifact_name)
@@ -191,17 +206,27 @@ def _get_artifact_service_kind(self) -> List[str]:

return service_kind

def _get_artifact_stimulus_type(self) -> List[str]:
if self._args.concurrency:
stimulus = [f"concurrency{self._args.concurrency}"]
elif self._args.request_rate:
stimulus = [f"request_rate{self._args.request_rate}"]
elif "concurrency" in self._parameters:
concurrency = str(self._parameters["concurrency"])
def _get_artifact_stimulus_type(
self, model_objective_parameters: ModelObjectiveParameters
) -> List[str]:
parameters = model_objective_parameters[self._model_name]

if "concurrency" in parameters:
concurrency = str(parameters["concurrency"].get_value_based_on_category())
stimulus = [f"concurrency{concurrency}"]
elif "request_rate" in self._parameters:
request_rate = str(self._parameters["request_rate"])
elif "request_rate" in parameters:
request_rate = str(parameters["request_rate"].get_value_based_on_category())
stimulus = [f"request_rate{request_rate}"]
elif "input_sequence_length" in parameters:
input_sequence_length = str(
parameters["input_sequence_length"].get_value_based_on_category()
)
stimulus = [f"input_sequence_length{input_sequence_length}"]
elif "num_prompts" in parameters:
input_sequence_length = str(
parameters["num_prompts"].get_value_based_on_category()
)
stimulus = [f"num_prompts{input_sequence_length}"]

return stimulus

@@ -265,7 +290,7 @@ def _add_extra_args(self, extra_args: Optional[List[str]]) -> List[str]:
###########################################################################
# Get Accessor Methods
###########################################################################
def get_parameters(self) -> Dict[str, Any]:
def get_parameters(self) -> Parameters:
"""
Returns a dictionary of parameters and their values
"""
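Also worth noting in this file: _get_artifact_stimulus_type() now derives the artifact-directory suffix from the model's objective parameters rather than from CLI flags, checking them in a fixed priority order. A compact illustration under assumed types follows; the ObjectiveParameter stand-in and the helper name are hypothetical.

# Illustration of the priority chain used to name the artifact directory.
# ObjectiveParameter here is a stand-in for the project's parameter objects,
# which expose get_value_based_on_category().
from dataclasses import dataclass
from typing import Dict, List

@dataclass
class ObjectiveParameter:
    value: int

    def get_value_based_on_category(self) -> int:
        return self.value

def stimulus_suffix(parameters: Dict[str, ObjectiveParameter]) -> List[str]:
    # Mirrors the elif chain in the diff: the first matching key wins.
    for key in ("concurrency", "request_rate", "input_sequence_length", "num_prompts"):
        if key in parameters:
            return [f"{key}{parameters[key].get_value_based_on_category()}"]
    return []

print(stimulus_suffix({"concurrency": ObjectiveParameter(8)}))    # ['concurrency8']
print(stimulus_suffix({"num_prompts": ObjectiveParameter(100)}))  # ['num_prompts100']
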
5 changes: 2 additions & 3 deletions genai-perf/genai_perf/config/generate/search_parameters.py
@@ -38,6 +38,7 @@ class SearchParameters:
"runtime_batch_size",
"concurrency",
"request_rate",
"input_sequence_length",
]

linear_range_parameters = ["instance_count", "num_prompts"]
@@ -50,9 +51,7 @@

runtime_pa_parameters = ["runtime_batch_size", "concurrency", "request_rate"]

runtime_gap_parameters = [
"num_prompts",
]
runtime_gap_parameters = ["num_prompts", "input_sequence_length"]

all_parameters = model_parameters + runtime_pa_parameters + runtime_gap_parameters

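The class-level lists above partition each searchable parameter between two runtime owners: Perf Analyzer (RUNTIME_PA) and GenAI-Perf itself (RUNTIME_GAP), with the new input_sequence_length entry landing on the GenAI-Perf side. A rough sketch of that split; the owner() helper is hypothetical and ignores the model-level parameters.

# Hypothetical helper showing the ownership split encoded by the lists above.
runtime_pa_parameters = ["runtime_batch_size", "concurrency", "request_rate"]
runtime_gap_parameters = ["num_prompts", "input_sequence_length"]

def owner(parameter: str) -> str:
    if parameter in runtime_pa_parameters:
        return "perf_analyzer"
    if parameter in runtime_gap_parameters:
        return "genai_perf"
    raise ValueError(f"unknown runtime search parameter: {parameter}")

assert owner("input_sequence_length") == "genai_perf"
assert owner("concurrency") == "perf_analyzer"
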
6 changes: 5 additions & 1 deletion genai-perf/genai_perf/config/run/run_config.py
@@ -27,6 +27,7 @@
MetricObjectives,
ModelName,
ModelWeights,
Parameters,
PerfMetricName,
PerfRecords,
RunConfigName,
@@ -95,7 +96,10 @@ def create_class_from_checkpoint(
###########################################################################
# Get Accessor Methods
###########################################################################
def get_perf_analyzer_parameters(self) -> Dict[str, Any]:
def get_genai_perf_parameters(self) -> Parameters:
return self.genai_perf_config.get_parameters()

def get_perf_analyzer_parameters(self) -> Parameters:
return self.perf_analyzer_config.get_parameters()

def get_all_gpu_metrics(self) -> GpuRecords:
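The new Parameters annotation comes from genai_perf.types. Judging by the Dict[str, Any] signature it replaces, it is presumably a simple type alias; the definition below is an assumption for illustration only, not the project's code.

# Assumed shape of the Parameters alias (the real definition lives in
# genai_perf/genai_perf/types.py and is not shown in this diff).
from typing import Any, Dict

Parameters = Dict[str, Any]

# With that alias, the two RunConfig accessors simply delegate:
#   run_config.get_genai_perf_parameters()    -> self.genai_perf_config.get_parameters()
#   run_config.get_perf_analyzer_parameters() -> self.perf_analyzer_config.get_parameters()
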
2 changes: 1 addition & 1 deletion genai-perf/genai_perf/export_data/output_reporter.py
@@ -30,7 +30,7 @@
from genai_perf.export_data.data_exporter_factory import DataExporterFactory
from genai_perf.export_data.exporter_config import ExporterConfig
from genai_perf.metrics import Statistics, TelemetryStatistics
from genai_perf.parser import get_extra_inputs_as_dict
from genai_perf.subcommand.common import get_extra_inputs_as_dict


class OutputReporter:
10 changes: 10 additions & 0 deletions genai-perf/genai_perf/logging.py
@@ -95,6 +95,16 @@ def init_logging() -> None:
"level": "DEBUG",
"propagate": False,
},
"genai_perf.subcommand.analyze": {
"handlers": ["console"],
"level": "DEBUG",
"propagate": False,
},
"genai_perf.subcommand.common": {
"handlers": ["console"],
"level": "DEBUG",
"propagate": False,
},
},
}
logging.config.dictConfig(LOGGING_CONFIG)
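The two new dictConfig entries match the __name__ of the new subcommand modules, so loggers created there go to the console handler at DEBUG level without propagating to the root logger. A plain standard-library illustration follows; the project itself obtains loggers through its genai_perf.logging wrapper, and the message text here is made up.

# Standard-library view of what the new dictConfig entries enable. Inside
# genai_perf/subcommand/analyze.py, getLogger(__name__) resolves to the
# "genai_perf.subcommand.analyze" logger configured above.
import logging

logger = logging.getLogger("genai_perf.subcommand.analyze")

def log_sweep_start() -> None:
    # Assuming init_logging() has applied the config above, this is emitted
    # through the "console" handler at DEBUG level; propagate=False keeps it
    # from also reaching the root logger.
    logger.debug("starting analyze sweep")
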