This repository was archived by the owner on Oct 25, 2024. It is now read-only.

Update WOQ AutoRoundConfig parameter (#1568)
changwangss authored May 24, 2024
1 parent 0e13607 commit 5e5e17c
Showing 3 changed files with 5 additions and 5 deletions.
@@ -154,7 +154,7 @@
 help="minmax learning rate, if None,it will beset to be the same with lr",
 )
 parser.add_argument(
-"--enable_quanted_input",
+"--disable_quanted_input",
 action="store_true",
 help="whether to use the output of quantized block to tune the next block",
 )
@@ -286,7 +286,7 @@
 calib_len=args.calib_len,
 lr=args.lr,
 minmax_lr=args.minmax_lr,
-enable_quanted_input=args.enable_quanted_input,
+disable_quanted_input=args.disable_quanted_input,
 use_ipex=args.use_ipex,
 )
 else:
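The example script now exposes the inverse flag: since action="store_true" defaults to False, omitting --disable_quanted_input preserves the old default of feeding quantized-block outputs to the next block. Below is a minimal, self-contained sketch of how the renamed flag parses; the parser setup mirrors the hunk above, while the parse_args calls are purely illustrative and not part of this commit.

    import argparse

    # Parser setup mirrors the hunk above; everything else in this snippet is illustrative.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--disable_quanted_input",
        action="store_true",
        help="whether to use the output of quantized block to tune the next block",
    )

    # Flag omitted: store_true leaves the attribute False, matching the old
    # behavior where quantized-block outputs were used by default.
    args = parser.parse_args([])
    print(args.disable_quanted_input)  # False

    # Flag supplied: the attribute becomes True, i.e. quantized input is disabled.
    args = parser.parse_args(["--disable_quanted_input"])
    print(args.disable_quanted_input)  # True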
@@ -527,7 +527,7 @@ def default_calib_func(model):
 "seqlen": config.calib_len,
 "iters": config.iters,
 "scale_dtype": config.scale_dtype,
-"enable_quanted_input": config.enable_quanted_input,
+"enable_quanted_input": not config.disable_quanted_input,
 "lr": config.lr,
 "minmax_lr": config.minmax_lr,
 }
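The AutoRound backend still takes enable_quanted_input, so the user-facing disable flag is negated once when the keyword dictionary is assembled. Here is a small sketch of that inversion, with a SimpleNamespace standing in for the real config object (an assumption made for brevity; the real object carries many more fields).

    from types import SimpleNamespace

    # Stand-in for the WOQ config; only disable_quanted_input is taken from the diff.
    config = SimpleNamespace(disable_quanted_input=False)

    # The backend keyword keeps its original name; the flag is simply inverted.
    autoround_kwargs = {
        "enable_quanted_input": not config.disable_quanted_input,
    }
    print(autoround_kwargs)  # {'enable_quanted_input': True}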
intel_extension_for_transformers/transformers/utils/config.py (4 changes: 2 additions & 2 deletions)
@@ -1056,7 +1056,7 @@ def __init__(
 sym: bool = False,
 lr: float = None,
 minmax_lr: float = None,
-enable_quanted_input: bool = True,
+disable_quanted_input: bool = False,
 nsamples: int = 512,
 iters: int = 200,
 use_ggml: bool = False,
@@ -1083,7 +1083,7 @@ def __init__(
 self.group_size = group_size
 self.lr = lr
 self.minmax_lr = minmax_lr
-self.enable_quanted_input = enable_quanted_input
+self.disable_quanted_input = disable_quanted_input
 self.iters = iters
 self.llm_int8_skip_modules = (
 llm_int8_skip_modules if llm_int8_skip_modules else []
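On the API side the constructor argument changes name and default together (enable_quanted_input: bool = True becomes disable_quanted_input: bool = False), so the default behavior is unchanged and only callers that explicitly opted out need updating. A hedged usage sketch follows; the import path and the assumption that all other parameters keep usable defaults should be verified against the installed package.

    # Assumed import path for the ITREX WOQ configs; verify against your install.
    from intel_extension_for_transformers.transformers import AutoRoundConfig

    # Default: disable_quanted_input is False, so quantized-block outputs
    # are still used to tune the next block, as before.
    woq_config = AutoRoundConfig(iters=200)

    # Opting out now means passing the renamed parameter explicitly.
    woq_config_opt_out = AutoRoundConfig(iters=200, disable_quanted_input=True)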
