utils.py
import torch
import numpy as np
import matplotlib.pyplot as plt


def fwd_gradients(obj, x):
    # Derivative of obj with respect to x via reverse-mode autograd.
    # create_graph=True keeps the graph so higher-order derivatives
    # (e.g. u_xx below) can be taken from the result.
    dummy = torch.ones_like(obj)
    derivative = torch.autograd.grad(obj, x, dummy, create_graph=True)[0]
    return derivative
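
# Minimal usage sketch for fwd_gradients (illustrative only, not part of the
# original file): for u equal to the row-wise sum of squares of the inputs,
# the returned derivative is 2*tx.
#
#   tx = torch.rand(5, 2, requires_grad=True)
#   u = (tx ** 2).sum(dim=1, keepdim=True)
#   du = fwd_gradients(u, tx)  # equals 2 * tx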


def burgers_equation(u, tx):
    # Residual of the viscous Burgers equation u_t + u*u_x = (0.01/pi)*u_xx,
    # with the columns of tx ordered as (t, x).
    u_tx = fwd_gradients(u, tx)
    u_t = u_tx[:, 0:1]
    u_x = u_tx[:, 1:2]
    u_xx = fwd_gradients(u_x, tx)[:, 1:2]
    e = u_t + u * u_x - (0.01 / np.pi) * u_xx
    return e


def ac_equation(u, tx):
    # Residual of the Allen-Cahn equation u_t = 0.0001*u_xx - 5*u**3 + 5*u.
    u_tx = fwd_gradients(u, tx)
    u_t = u_tx[:, 0:1]
    u_x = u_tx[:, 1:2]
    u_xx = fwd_gradients(u_x, tx)[:, 1:2]
    e = u_t - 0.0001 * u_xx + 5 * u ** 3 - 5 * u
    return e


def resplot(x, t, t_data, x_data, Exact, u_pred):
    # Compare the reference solution and the prediction at four time slices:
    # t = 0, then 25%, 50%, and 99% of the way through the time grid.
    plt.figure(figsize=(10, 10))

    plt.subplot(2, 2, 1)
    plt.plot(x, Exact[:, 0], '-')
    plt.plot(x, u_pred[:, 0], '--')
    plt.legend(['Reference', 'Prediction'])
    plt.title("Initial condition ($t=0$)")

    plt.subplot(2, 2, 2)
    t_step = int(0.25 * len(t))
    plt.plot(x, Exact[:, t_step], '-')
    plt.plot(x, u_pred[:, t_step], '--')
    plt.legend(['Reference', 'Prediction'])
    plt.title("$t=0.25$")

    plt.subplot(2, 2, 3)
    t_step = int(0.5 * len(t))
    plt.plot(x, Exact[:, t_step], '-')
    plt.plot(x, u_pred[:, t_step], '--')
    plt.legend(['Reference', 'Prediction'])
    plt.title("$t=0.5$")

    plt.subplot(2, 2, 4)
    t_step = int(0.99 * len(t))
    plt.plot(x, Exact[:, t_step], '-')
    plt.plot(x, u_pred[:, t_step], '--')
    plt.legend(['Reference', 'Prediction'])
    plt.title("$t=0.99$")

    plt.show()
    plt.close()
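

# A minimal usage sketch (an assumption, not part of the original file): feed
# random collocation points through a small untrained network and evaluate the
# Burgers PDE residual. The network architecture and point counts here are
# hypothetical and only meant to show how the helpers fit together.
if __name__ == "__main__":
    model = torch.nn.Sequential(
        torch.nn.Linear(2, 20), torch.nn.Tanh(), torch.nn.Linear(20, 1)
    )
    tx = torch.rand(100, 2, requires_grad=True)  # columns: (t, x)
    u = model(tx)
    residual = burgers_equation(u, tx)
    print("mean squared PDE residual:", (residual ** 2).mean().item())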