From 6c20d40f4ba370050f13cee831583a578456507d Mon Sep 17 00:00:00 2001
From: WangZhen <23097963+0x45f@users.noreply.github.com>
Date: Mon, 3 Jul 2023 21:06:06 +0800
Subject: [PATCH 1/4] Fix dispatch getattr error (#233)

---
 sot/opcode_translator/executor/variable_dispatch.py |  2 +-
 tests/test_builtin_dispatch.py                      | 12 ++++++++++++
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/sot/opcode_translator/executor/variable_dispatch.py b/sot/opcode_translator/executor/variable_dispatch.py
index d986b4ad5..6c21031ae 100644
--- a/sot/opcode_translator/executor/variable_dispatch.py
+++ b/sot/opcode_translator/executor/variable_dispatch.py
@@ -94,7 +94,7 @@
     getattr,
     ("VariableBase", "ConstantVariable"),
     {},
-    lambda var, name, default: var.getattr(name.get_value(), default),
+    lambda var, name: var.getattr(name.get_value()),
 )
 Dispatcher.register(
     getattr,
diff --git a/tests/test_builtin_dispatch.py b/tests/test_builtin_dispatch.py
index a1803f718..f22b26781 100644
--- a/tests/test_builtin_dispatch.py
+++ b/tests/test_builtin_dispatch.py
@@ -22,5 +22,17 @@ def test_dispatch_bool(self):
         self.assert_results(dispatch_bool, paddle.to_tensor([1, 2, 3]))
 
 
+def run_getattr(x: paddle.Tensor):
+    attr = 'dtype'
+    out = getattr(x, attr)
+    return out
+
+
+class TestGetattr(TestCaseBase):
+    def test_getattr(self):
+        x = paddle.to_tensor(4)
+        self.assert_results(run_getattr, x)
+
+
 if __name__ == "__main__":
     unittest.main()

From 5a5bbcf10406d2f027c1b2f07362b1d95e66eccf Mon Sep 17 00:00:00 2001
From: xiongkun
Date: Tue, 4 Jul 2023 10:47:45 +0800
Subject: [PATCH 2/4] [BugFix] fix resnet and resnetv2 (#231)

---
 sot/opcode_translator/executor/opcode_executor.py | 10 ++++++++--
 tests/run_all_paddle_ci.sh                        |  2 --
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/sot/opcode_translator/executor/opcode_executor.py b/sot/opcode_translator/executor/opcode_executor.py
index a2990c3f2..e5fbfb90c 100644
--- a/sot/opcode_translator/executor/opcode_executor.py
+++ b/sot/opcode_translator/executor/opcode_executor.py
@@ -705,7 +705,6 @@ def ROT_FOUR(self, instr: Instruction):
     BINARY_OR = tos_op_wrapper(operator.or_)
     BINARY_XOR = tos_op_wrapper(operator.xor)
 
-    @call_break_graph_decorator(push_n=1)
     def BINARY_SUBSCR(self, instr: Instruction):
         key = self.pop()
         container = self.pop()
@@ -736,7 +735,6 @@ def BINARY_SUBSCR(self, instr: Instruction):
     def NOP(self, instr: Instruction):
         pass
 
-    @call_break_graph_decorator(push_n=1)
     def LOAD_ATTR(self, instr: Instruction):
         attr_name = instr.argval
         obj = self.pop()
@@ -1768,6 +1766,14 @@ def CALL_FUNCTION_KW(self, instr: Instruction):
     def CALL_FUNCTION_EX(self, instr: Instruction):
         super().CALL_FUNCTION_EX(instr)
 
+    @call_break_graph_decorator(push_n=1)
+    def LOAD_ATTR(self, instr: Instruction):
+        super().LOAD_ATTR(instr)
+
+    @call_break_graph_decorator(push_n=1)
+    def BINARY_SUBSCR(self, instr: Instruction):
+        super().BINARY_SUBSCR(instr)
+
     def RETURN_VALUE(self, instr: Instruction):
         assert (
             len(self._stack) == 1
diff --git a/tests/run_all_paddle_ci.sh b/tests/run_all_paddle_ci.sh
index 41fa1c927..0b783fb68 100644
--- a/tests/run_all_paddle_ci.sh
+++ b/tests/run_all_paddle_ci.sh
@@ -10,8 +10,6 @@ disabled_tests=(
     ${PADDLE_TEST_BASE}/test_list.py # side effect
     ${PADDLE_TEST_BASE}/test_sentiment.py # disabled unitcase by paddle
     ${PADDLE_TEST_BASE}/test_reinforcement_learning.py # 'CartPoleEnv' object has no attribute 'seed'
-    ${PADDLE_TEST_BASE}/test_resnet_v2.py # segment error: oneDNN
-    ${PADDLE_TEST_BASE}/test_resnet.py # segment error: oneDNN
     # tmp = x
     # for i in range(x)
     #     tmp += Linear(x)

From 6af0a9e14e3cd1528066ebab0be7ae172a4f08e5 Mon Sep 17 00:00:00 2001
From: feifei-111 <2364819892@qq.com>
Date: Tue, 4 Jul 2023 14:28:24 +0800
Subject: [PATCH 3/4] Fix infer layer (#219)

---
 sot/infer_meta.py | 63 +++++++++++++++++++++++++++++++++++++++++++++++------------------------
 1 file changed, 39 insertions(+), 24 deletions(-)

diff --git a/sot/infer_meta.py b/sot/infer_meta.py
index 93743d0d1..21523d0f7 100644
--- a/sot/infer_meta.py
+++ b/sot/infer_meta.py
@@ -1,3 +1,5 @@
+import contextlib
+
 import paddle
 from paddle.fluid.unique_name import UniqueNameGenerator
 from paddle.fluid.unique_name import guard as UniqueNameGuard
@@ -84,6 +86,19 @@ def __init__(self):
         self.startup_program = Program()
         self.var_name_generator = UniqueNameGenerator("infer_meta_variable_")
 
+    def static_guard(self):
+        @contextlib.contextmanager
+        def _static_guard():
+            with paddle.fluid.framework._dygraph_guard(None), UniqueNameGuard(
+                self.var_name_generator
+            ):
+                with paddle.static.program_guard(
+                    self.main_program, self.startup_program
+                ):
+                    yield
+
+        return _static_guard()
+
     def gen_name(self, meta):
         name = f"{meta.dtype}_{meta.stop_gradient}"
         for l in meta.shape:
@@ -108,27 +123,21 @@ def get_variable(self, meta):
         return self.var_cache[var_feature_name]
 
     def infer_meta(self, func, *args, **kwargs):
-        with paddle.fluid.framework._dygraph_guard(None), UniqueNameGuard(
-            self.var_name_generator
-        ):
-            args, kwargs = convert_to_variable(args), convert_to_variable(
-                kwargs
-            )
-
-            with paddle.static.program_guard(
-                self.main_program, self.startup_program
-            ):
-                if isinstance(func, str):
-                    # TODO(Aurelius84): Is length of args always greater than 0?
-                    # Do we need add condition check here?
-                    out = getattr(args[0], func)(*args[1:], **kwargs)
-                else:
-                    out = func(*args, **kwargs)
+        with self.static_guard():
+            args, kwargs = convert_meta_to_variable(
+                args
+            ), convert_meta_to_variable(kwargs)
+            if isinstance(func, str):
+                # TODO(Aurelius84): Is length of args always greater than 0?
+                # Do we need add condition check here?
+                out = getattr(args[0], func)(*args[1:], **kwargs)
+            else:
+                out = func(*args, **kwargs)
 
-            return variable_to_meta_info(out)
+            return convert_variable_to_meta_info(out)
 
 
-def convert_to_variable(args):
+def convert_meta_to_variable(args):
     return map_if(
         args,
         pred=lambda x: isinstance(x, MetaInfo),
@@ -137,7 +146,7 @@ def convert_to_variable(args):
     )
 
 
-def convert_to_input_spec(args):
+def convert_meta_to_input_spec(args):
     return map_if(
         args,
         pred=lambda x: isinstance(x, MetaInfo),
@@ -146,7 +155,7 @@ def convert_to_input_spec(args):
     )
 
 
-def variable_to_meta_info(args):
+def convert_variable_to_meta_info(args):
     return map_if(
         args,
         pred=lambda x: isinstance(x, paddle.static.Variable),
@@ -168,10 +177,16 @@ def infer_meta_for_layer(layer, *args, **kwargs):
     assert isinstance(
         layer, paddle.nn.Layer
     ), f"Expect a Layer, but got {layer}."
     layer = paddle.jit.to_static(layer, enable_fallback=False)
-    args, kwargs = convert_to_input_spec(args), convert_to_input_spec(kwargs)
-    concrete_program = layer.forward.get_concrete_program(*args, **kwargs)[0]
-    out = concrete_program.outputs[0]
-    out = MetaInfo.from_tensor(out)
+    args_, kwargs_ = convert_meta_to_input_spec(
+        args
+    ), convert_meta_to_input_spec(kwargs)
+    (
+        concrete_program,
+        partial_program_layer,
+    ) = layer.forward.get_concrete_program(*args_, **kwargs_)
+    out = partial_program_layer._restore_out(
+        convert_variable_to_meta_info(concrete_program.outputs)
+    )
     layer.forward.rollback()
     return out

From 019b846b71221f54dbd47fbfe847853fb5dcae3e Mon Sep 17 00:00:00 2001
From: Nyakku Shigure
Date: Tue, 4 Jul 2023 16:03:57 +0800
Subject: [PATCH 4/4] Revert "Fix infer layer" (#239)

---
 sot/infer_meta.py | 63 ++++++++++++++++++++++---------------------------------------
 1 file changed, 24 insertions(+), 39 deletions(-)

diff --git a/sot/infer_meta.py b/sot/infer_meta.py
index 21523d0f7..93743d0d1 100644
--- a/sot/infer_meta.py
+++ b/sot/infer_meta.py
@@ -1,5 +1,3 @@
-import contextlib
-
 import paddle
 from paddle.fluid.unique_name import UniqueNameGenerator
 from paddle.fluid.unique_name import guard as UniqueNameGuard
@@ -86,19 +84,6 @@ def __init__(self):
         self.startup_program = Program()
         self.var_name_generator = UniqueNameGenerator("infer_meta_variable_")
 
-    def static_guard(self):
-        @contextlib.contextmanager
-        def _static_guard():
-            with paddle.fluid.framework._dygraph_guard(None), UniqueNameGuard(
-                self.var_name_generator
-            ):
-                with paddle.static.program_guard(
-                    self.main_program, self.startup_program
-                ):
-                    yield
-
-        return _static_guard()
-
     def gen_name(self, meta):
         name = f"{meta.dtype}_{meta.stop_gradient}"
         for l in meta.shape:
@@ -123,21 +108,27 @@ def get_variable(self, meta):
         return self.var_cache[var_feature_name]
 
     def infer_meta(self, func, *args, **kwargs):
-        with self.static_guard():
-            args, kwargs = convert_meta_to_variable(
-                args
-            ), convert_meta_to_variable(kwargs)
-            if isinstance(func, str):
-                # TODO(Aurelius84): Is length of args always greater than 0?
-                # Do we need add condition check here?
-                out = getattr(args[0], func)(*args[1:], **kwargs)
-            else:
-                out = func(*args, **kwargs)
+        with paddle.fluid.framework._dygraph_guard(None), UniqueNameGuard(
+            self.var_name_generator
+        ):
+            args, kwargs = convert_to_variable(args), convert_to_variable(
+                kwargs
+            )
+
+            with paddle.static.program_guard(
+                self.main_program, self.startup_program
+            ):
+                if isinstance(func, str):
+                    # TODO(Aurelius84): Is length of args always greater than 0?
+                    # Do we need add condition check here?
+                    out = getattr(args[0], func)(*args[1:], **kwargs)
+                else:
+                    out = func(*args, **kwargs)
 
-            return convert_variable_to_meta_info(out)
+            return variable_to_meta_info(out)
 
 
-def convert_meta_to_variable(args):
+def convert_to_variable(args):
     return map_if(
         args,
         pred=lambda x: isinstance(x, MetaInfo),
@@ -146,7 +137,7 @@ def convert_meta_to_variable(args):
     )
 
 
-def convert_meta_to_input_spec(args):
+def convert_to_input_spec(args):
     return map_if(
         args,
         pred=lambda x: isinstance(x, MetaInfo),
@@ -155,7 +146,7 @@ def convert_meta_to_input_spec(args):
     )
 
 
-def convert_variable_to_meta_info(args):
+def variable_to_meta_info(args):
     return map_if(
         args,
         pred=lambda x: isinstance(x, paddle.static.Variable),
@@ -177,16 +168,10 @@ def infer_meta_for_layer(layer, *args, **kwargs):
     assert isinstance(
         layer, paddle.nn.Layer
     ), f"Expect a Layer, but got {layer}."
     layer = paddle.jit.to_static(layer, enable_fallback=False)
-    args_, kwargs_ = convert_meta_to_input_spec(
-        args
-    ), convert_meta_to_input_spec(kwargs)
-    (
-        concrete_program,
-        partial_program_layer,
-    ) = layer.forward.get_concrete_program(*args_, **kwargs_)
-    out = partial_program_layer._restore_out(
-        convert_variable_to_meta_info(concrete_program.outputs)
-    )
+    args, kwargs = convert_to_input_spec(args), convert_to_input_spec(kwargs)
+    concrete_program = layer.forward.get_concrete_program(*args, **kwargs)[0]
+    out = concrete_program.outputs[0]
+    out = MetaInfo.from_tensor(out)
     layer.forward.rollback()
     return out