all lint errors fixed
Signed-off-by: Shashank Mittal <[email protected]>
shashank-iitbhu committed Sep 2, 2024
1 parent 1be4e8d commit 852b4eb
Showing 3 changed files with 54 additions and 43 deletions.
56 changes: 27 additions & 29 deletions pkg/suggestion/v1beta1/hyperopt/base_service.py
@@ -17,15 +17,15 @@
 import hyperopt
 import numpy as np
 
-from pkg.suggestion.v1beta1.internal.constant import CATEGORICAL
-from pkg.suggestion.v1beta1.internal.constant import DISCRETE
-from pkg.suggestion.v1beta1.internal.constant import DOUBLE
-from pkg.suggestion.v1beta1.internal.constant import INTEGER
-from pkg.suggestion.v1beta1.internal.constant import MAX_GOAL
-from pkg.suggestion.v1beta1.internal.constant import UNIFORM
-from pkg.suggestion.v1beta1.internal.constant import LOG_UNIFORM
-from pkg.suggestion.v1beta1.internal.constant import NORMAL
-from pkg.suggestion.v1beta1.internal.constant import LOG_NORMAL
+from pkg.suggestion.v1beta1.internal.constant import (
+    CATEGORICAL,
+    DISCRETE,
+    DOUBLE,
+    INTEGER,
+    LOG_UNIFORM,
+    MAX_GOAL,
+    UNIFORM,
+)
 from pkg.suggestion.v1beta1.internal.trial import Assignment
 
 logger = logging.getLogger(__name__)
@@ -66,39 +66,37 @@ def create_hyperopt_domain(self):
         for param in self.search_space.params:
             if param.type == INTEGER:
                 hyperopt_search_space[param.name] = hyperopt.hp.uniformint(
-                    param.name,
-                    float(param.min),
-                    float(param.max))
+                    param.name, float(param.min), float(param.max)
+                )
             elif param.type == DOUBLE:
                 if param.distribution == UNIFORM:
                     if param.step:
                         hyperopt_search_space[param.name] = hyperopt.hp.quniform(
-                            param.name,
-                            float(param.min),
-                            float(param.max),
-                            float(param.step))
+                            param.name,
+                            float(param.min),
+                            float(param.max),
+                            float(param.step),
+                        )
                     else:
                         hyperopt_search_space[param.name] = hyperopt.hp.uniform(
-                            param.name,
-                            float(param.min),
-                            float(param.max))
+                            param.name, float(param.min), float(param.max)
+                        )
                 elif param.distribution == LOG_UNIFORM:
                     if param.step:
                         hyperopt_search_space[param.name] = hyperopt.hp.qloguniform(
-                            param.name,
-                            float(param.min),
-                            float(param.max),
-                            float(param.step))
+                            param.name,
+                            float(param.min),
+                            float(param.max),
+                            float(param.step),
+                        )
                     else:
                         hyperopt_search_space[param.name] = hyperopt.hp.loguniform(
-                            param.name,
-                            float(param.min),
-                            float(param.max))
+                            param.name, float(param.min), float(param.max)
+                        )
                 else:
                     hyperopt_search_space[param.name] = hyperopt.hp.uniform(
-                        param.name,
-                        float(param.min),
-                        float(param.max))
+                        param.name, float(param.min), float(param.max)
+                    )
             elif param.type == CATEGORICAL or param.type == DISCRETE:
                 hyperopt_search_space[param.name] = hyperopt.hp.choice(
                     param.name, param.list
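Note (not part of the commit): the calls reformatted above map Katib parameter types and distributions onto hyperopt's stock search-space primitives (hp.uniformint, hp.quniform, hp.uniform, hp.qloguniform, hp.loguniform, hp.choice). Below is a minimal, self-contained sketch of how those primitives behave on their own; the parameter names, bounds, and objective are illustrative only and are not taken from Katib.

import hyperopt
import numpy as np

# A space analogous to what create_hyperopt_domain() builds: an integer range,
# a quantized uniform double, and a log-uniform double.
space = {
    "num_layers": hyperopt.hp.uniformint("num_layers", 1, 5),
    "dropout": hyperopt.hp.quniform("dropout", 0.1, 0.9, 0.1),
    "lr": hyperopt.hp.loguniform("lr", float(np.log(1e-5)), float(np.log(1e-1))),
}

def objective(params):
    # Stand-in loss; a Katib trial would report a real training metric here.
    return params["dropout"] + params["lr"]

best = hyperopt.fmin(fn=objective, space=space, algo=hyperopt.tpe.suggest, max_evals=10)
print(best)

One detail worth noting: hp.loguniform and hp.qloguniform interpret their bounds in log space, which is why the sketch wraps the desired range in np.log.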
39 changes: 26 additions & 13 deletions pkg/suggestion/v1beta1/internal/search_space.py
@@ -82,22 +82,36 @@ def __str__(self):
 
     @staticmethod
     def convert_parameter(p):
-        distribution = p.feasible_space.distribution if p.feasible_space.distribution != "" and p.feasible_space.distribution is not None and p.feasible_space.distribution != api.DISTRIBUTION_UNKNOWN else None
-
+        distribution = (
+            p.feasible_space.distribution
+            if p.feasible_space.distribution != ""
+            and p.feasible_space.distribution is not None
+            and p.feasible_space.distribution != api.DISTRIBUTION_UNKNOWN
+            else None
+        )
+
         if p.parameter_type == api.INT:
             # Default value for INT parameter step is 1
             step = p.feasible_space.step if p.feasible_space.step else 1
-            return HyperParameter.int(p.name, p.feasible_space.min, p.feasible_space.max, step, distribution)
-
+            return HyperParameter.int(
+                p.name, p.feasible_space.min, p.feasible_space.max, step, distribution
+            )
+
         elif p.parameter_type == api.DOUBLE:
-            return HyperParameter.double(p.name, p.feasible_space.min, p.feasible_space.max, p.feasible_space.step, distribution)
-
+            return HyperParameter.double(
+                p.name,
+                p.feasible_space.min,
+                p.feasible_space.max,
+                p.feasible_space.step,
+                distribution,
+            )
+
         elif p.parameter_type == api.CATEGORICAL:
             return HyperParameter.categorical(p.name, p.feasible_space.list)
 
         elif p.parameter_type == api.DISCRETE:
             return HyperParameter.discrete(p.name, p.feasible_space.list)
 
         else:
             logger.error(
                 "Cannot get the type for the parameter: %s (%s)",
@@ -118,8 +132,9 @@ def __init__(self, name, type_, min_, max_, list_, step, distribution=None):
 
     def __str__(self):
         if self.type in [constant.INTEGER, constant.DOUBLE]:
-            return "HyperParameter(name: {}, type: {}, min: {}, max: {}, step: {}, distribution: {})".format(
-                self.name, self.type, self.min, self.max, self.step, self.distribution
+            return (
+                f"HyperParameter(name: {self.name}, type: {self.type}, min: {self.min}, "
+                f"max: {self.max}, step: {self.step}, distribution: {self.distribution})"
             )
         else:
             return "HyperParameter(name: {}, type: {}, list: {})".format(
@@ -134,9 +149,7 @@ def int(name, min_, max_, step, distribution=None):
 
     @staticmethod
     def double(name, min_, max_, step, distribution=None):
-        return HyperParameter(
-            name, constant.DOUBLE, min_, max_, [], step, distribution
-        )
+        return HyperParameter(name, constant.DOUBLE, min_, max_, [], step, distribution)
 
     @staticmethod
     def categorical(name, lst):
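Note (not part of the commit): the multi-line conditional introduced in convert_parameter keeps the original rule intact: an empty string, None, or api.DISTRIBUTION_UNKNOWN all collapse to "no distribution", while any other value passes through. Here is a standalone sketch of that rule, with the api enum replaced by a placeholder string so the snippet runs without the generated protobuf module:

# Stand-in for api.DISTRIBUTION_UNKNOWN; the real value comes from api_pb2.
DISTRIBUTION_UNKNOWN = "unknown"

def normalize_distribution(raw):
    # Treat "", None, and the unknown sentinel as "no distribution set".
    return (
        raw
        if raw != "" and raw is not None and raw != DISTRIBUTION_UNKNOWN
        else None
    )

assert normalize_distribution("") is None
assert normalize_distribution(None) is None
assert normalize_distribution(DISTRIBUTION_UNKNOWN) is None
assert normalize_distribution("logUniform") == "logUniform"  # illustrative value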
2 changes: 1 addition & 1 deletion test/unit/v1beta1/suggestion/test_hyperopt_service.py
@@ -16,11 +16,11 @@
 
 import grpc
 import grpc_testing
-from pkg.suggestion.v1beta1.internal.constant import LOG_UNIFORM
 import utils
 
 from pkg.apis.manager.v1beta1.python import api_pb2
 from pkg.suggestion.v1beta1.hyperopt.service import HyperoptService
+from pkg.suggestion.v1beta1.internal.constant import LOG_UNIFORM
 
 
 class TestHyperopt(unittest.TestCase):