tvm relay backend #79

Merged (25 commits) on Oct 12, 2023
6 changes: 6 additions & 0 deletions .gitignore
@@ -23,3 +23,9 @@ src/paddlefx/_version.py
.cache/
*.so
tmp/

+# viztracer
+result.json
+
+# mlir
+*.mlir
27 changes: 0 additions & 27 deletions examples/TODO/resnet_dynamo.py

This file was deleted.

108 changes: 0 additions & 108 deletions examples/TODO/simple_dynamo.py

This file was deleted.

46 changes: 46 additions & 0 deletions examples/resnet_dynamo.py
@@ -0,0 +1,46 @@
from __future__ import annotations

import numpy as np
import paddle
import paddle.nn
import paddle.tensor

from paddle.vision.models import resnet18

import paddlefx

from paddlefx.compiler.tvm import TVMCompiler

paddle.seed(1234)
# logging.getLogger().setLevel(logging.DEBUG)

compiler = TVMCompiler(
    full_graph=True,
    print_tabular_mode="rich",
    tune_mode="auto_scheduler",
    # target="llvm -mcpu=core-avx2",
    # target="cuda",
)
net = resnet18(pretrained=True, num_classes=2)
net.eval()  # TVM's from_paddle has no argument to set the training mode
optimized_net = paddlefx.optimize(net, backend=compiler)

x = paddle.rand([1, 3, 224, 224], dtype="float32")
out = net(x)
res = optimized_net(x)
np.testing.assert_allclose(res.numpy(), out.numpy(), rtol=1e-5, atol=1e-6)
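# the optimized call above doubles as a warm-up: graph capture and TVM
# compilation happen there, so the timing loops below measure steady state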

import time

start = time.time()
for _ in range(10):
    out = net(x)
end = time.time()
print("net: ", end - start)

start = time.time()
for _ in range(10):
    res = optimized_net(x)
end = time.time()
print("optimized_net: ", end - start)
4 changes: 3 additions & 1 deletion examples/resnet_trace.py
@@ -7,10 +7,12 @@

from paddlefx import symbolic_trace

+paddle.seed(1234)

net = resnet18()
traced_layer = symbolic_trace(net)

-example_input = paddle.rand([2, 3, 224, 224])
+example_input = paddle.rand([2, 3, 24, 24])
orig_output = net(example_input)
traced_output = traced_layer(example_input)

26 changes: 14 additions & 12 deletions examples/simple_compiler.py
@@ -1,7 +1,5 @@
from __future__ import annotations

-import logging
-
import numpy as np
import paddle
import paddle.nn
@@ -11,15 +9,15 @@

from paddlefx.compiler import TVMCompiler

-paddle.seed(0)
+paddle.seed(1234)

-logging.getLogger().setLevel(logging.DEBUG)
+# logging.getLogger().setLevel(logging.DEBUG)


def inner_func(x, y):
    p = paddle.add(x, y)
-    q = paddle._C_ops.subtract(x, y)
-    z = p * q
+    # q = paddle._C_ops.subtract(x, y)  # unsupported in static mode
+    z = p * x
    return z / y


@@ -28,11 +26,15 @@ def func(a, b):
    return d


-optimized_net = paddlefx.optimize(func, backend=TVMCompiler(print_tabular=True))
+optimized_func = paddlefx.optimize(
+    func, backend=TVMCompiler(full_graph=True, print_tabular_mode="rich")
+)

-x = paddle.rand([1, 224])
-y = paddle.rand([1, 224])
-out = func(x, y)
-res = optimized_net(x, y)
+x = paddle.rand([4, 6, 1])
+y = paddle.rand([4, 6, 224])
+for _ in range(10):
+    res = optimized_func(x, y)
+    res = optimized_func(y, x)
+out = func(y, x)

np.testing.assert_equal(res.numpy(), out.numpy())
50 changes: 50 additions & 0 deletions examples/simple_dynamo.py
@@ -0,0 +1,50 @@
from __future__ import annotations

import logging

import numpy as np
import paddle
import paddle.nn

import paddlefx

from paddlefx.compiler import DummyCompiler, TVMCompiler

logging.getLogger().setLevel(logging.DEBUG)
static_compiler = DummyCompiler(full_graph=True, print_tabular_mode="rich")
compiler = TVMCompiler(full_graph=True, print_tabular_mode="rich")


def check_func(func, *args, backend: None = None):
    if backend is None:
        compiled_func = paddlefx.optimize(func)
    else:
        compiled_func = paddlefx.optimize(func, backend=backend)
    out = func(*args)
    res = compiled_func(*args)
    if isinstance(out, tuple):
        for i in range(len(res)):
            np.testing.assert_allclose(res[i], out[i])
    else:
        np.testing.assert_allclose(res, out, rtol=1e-5, atol=1e-6)


class SimpleNet(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self.fc1 = paddle.nn.Linear(16, 4)
        self.fc2 = paddle.nn.Linear(16, 1)

    def forward(self, a, b):
        c = self.fc1(a)
        d = self.fc2(b)
        e = paddle.add(c, d)
        return e


net = SimpleNet()


in_a = paddle.rand([8, 16])
in_b = paddle.rand([8, 16])
check_func(net, in_a, in_b, backend=static_compiler)
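
# a possible follow-up (not in this diff) would run the same check through
# the TVM backend constructed above:
# check_func(net, in_a, in_b, backend=compiler)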
6 changes: 3 additions & 3 deletions examples/targets/target_3_add_paddle.py
@@ -15,16 +15,16 @@
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
# logging.basicConfig(level=logging.INFO, format="%(message)s")

-paddle.seed(0)
+paddle.seed(1234)


def func(x, y):
    z = paddle.add(x, y)
-    o = paddle._C_ops.add(z, z)
+    o = paddle._C_ops.add(z, z)  # type: ignore
    return o


-@paddlefx.optimize(backend=TVMCompiler(print_tabular=True))
+@paddlefx.optimize(backend=TVMCompiler(print_tabular_mode="rich"))
def net(a, b):
    c = func(a, b)
    return c
3 changes: 2 additions & 1 deletion requirements_dev.txt
@@ -13,7 +13,8 @@ pre-commit>=3.0.0

tabulate==0.9.0

-apache-tvm>=0.11.1
+apache-tvm==0.14.dev214
+xgboost
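+# xgboost backs TVM's auto-tuning cost models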

# debug python with paddle
opencv-python-headless
60 changes: 60 additions & 0 deletions src/paddlefx/cache_manager.py
@@ -0,0 +1,60 @@
from __future__ import annotations

import dataclasses
import types

from typing import TYPE_CHECKING, Callable

if TYPE_CHECKING:
    GuardFunction = Callable[[types.FrameType], bool]
    GuardedCodes = list["GuardedCode"]


@dataclasses.dataclass
class GuardedCode:
    code: types.CodeType
    guard_fn: GuardFunction


class CodeCacheManager:
    cache_dict: dict[types.CodeType, GuardedCodes] = {}

    @classmethod
    def add_cache(cls, code: types.CodeType, guarded_code: GuardedCode):
        cls.cache_dict.setdefault(code, [])
        cls.cache_dict[code].append(guarded_code)

    @classmethod
    def get_cache(cls, frame: types.FrameType) -> GuardedCode | None:
        code: types.CodeType = frame.f_code
        if code not in cls.cache_dict:
            print(f"First call for {code}\n")
            return None
        return cls.lookup(frame, cls.cache_dict[code])

    @classmethod
    def clear_cache(cls):
        cls.cache_dict.clear()

    @classmethod
    def lookup(
        cls, frame: types.FrameType, guarded_codes: GuardedCodes
    ) -> GuardedCode | None:
        for guarded_code in guarded_codes:
            try:
                guard_fn = guarded_code.guard_fn
                if guard_fn(frame):
                    print(
                        f"[Cache]: Cache hit, GuardFunction is {guard_fn}\n",
                    )
                    return guarded_code
                else:
                    print(
                        f"[Cache]: Cache miss, GuardFunction is {guard_fn}\n",
                    )
            except Exception as e:
                print(f"[Cache]: GuardFunction error: {e}\n")
                continue

        print("[Cache]: all guards missed\n")
        return None
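
A minimal usage sketch of the new cache API (illustrative only, not part of
this diff; the transformed code object and the guard are hypothetical
stand-ins):

import sys

from paddlefx.cache_manager import CodeCacheManager, GuardedCode


def transformed():  # stand-in for rewritten bytecode produced by a compiler
    pass


def demo(x):
    frame = sys._getframe()  # current frame; its f_locals contain `x`
    guarded = GuardedCode(
        code=transformed.__code__,
        # hypothetical guard: reuse the cached code only while `x` is an int
        guard_fn=lambda f: isinstance(f.f_locals.get("x"), int),
    )
    CodeCacheManager.add_cache(frame.f_code, guarded)
    assert CodeCacheManager.get_cache(frame) is guarded  # guard passes: hit


demo(3)
CodeCacheManager.clear_cache()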