Fix
co63oc committed Sep 19, 2023
1 parent b3c6919 commit c46baa4
Showing 3 changed files with 17 additions and 15 deletions.
2 changes: 2 additions & 0 deletions jointContribution/PIRBN/README.md
@@ -32,6 +32,8 @@ For more details in terms of mathematical proofs and numerical examples, please

<https://github.com/JinshuaiBai/PIRBN>

+<https://arxiv.org/ftp/arxiv/papers/2304/2304.06234.pdf>
+
# Enviornmental settings

```
8 changes: 4 additions & 4 deletions jointContribution/PIRBN/analytical_solution.py
@@ -38,11 +38,11 @@ def output_fig(train_obj, mu, b, right_by, activation_function):

# Loss history of the network during the training process.
plt.subplot(2, 3, 3)
-loss_b = train_obj.loss_b
-x = range(len(loss_b))
+loss_g = train_obj.loss_g
+x = range(len(loss_g))
plt.yscale("log")
-plt.plot(x, loss_b)
-plt.plot(x, train_obj.loss_g)
+plt.plot(x, loss_g)
+plt.plot(x, train_obj.loss_b)
plt.legend(["Lg", "Lb"])
plt.ylabel("Loss")
plt.xlabel("Iteration")
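
The reordering matters because `plt.legend(["Lg", "Lb"])` labels curves in the order they were plotted, so the equation-loss history must now be drawn first. A minimal sketch of that behaviour, using made-up loss values rather than the repository's training output:

```python
import matplotlib.pyplot as plt

# Hypothetical loss histories, only to illustrate legend ordering.
loss_g = [1.0, 0.5, 0.2, 0.1]   # governing-equation loss per iteration
loss_b = [0.8, 0.4, 0.1, 0.05]  # boundary loss per iteration

x = range(len(loss_g))
plt.yscale("log")
plt.plot(x, loss_g)  # first curve -> labelled "Lg"
plt.plot(x, loss_b)  # second curve -> labelled "Lb"
plt.legend(["Lg", "Lb"])
plt.ylabel("Loss")
plt.xlabel("Iteration")
plt.show()
```
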
22 changes: 11 additions & 11 deletions jointContribution/PIRBN/train.py
@@ -23,8 +23,8 @@ def __init__(
]
self.y_train = paddle.to_tensor(y_train, dtype=paddle.get_default_dtype())
self.maxiter = maxiter
-self.loss_b = []
-self.loss_g = []
+self.loss_g = []  # eq loss
+self.loss_b = []  # boundary loss
self.iter = 0
self.a_g = paddle.to_tensor(1.0)
self.a_b = paddle.to_tensor(1.0)
@@ -36,24 +36,24 @@ def __init__(

def Loss(self, x, y, a_g, a_b):
tmp = self.pirbn(x, self.activation_function)
-loss_b = 0.5 * paddle.mean(paddle.square(tmp[0] - y[0]))
-loss_g = 0.5 * paddle.mean(paddle.square(tmp[1]))
-loss = loss_b * a_g + loss_g * a_b
-return loss, loss_b, loss_g
+loss_g = 0.5 * paddle.mean(paddle.square(tmp[0] - y[0]))
+loss_b = 0.5 * paddle.mean(paddle.square(tmp[1]))
+loss = loss_g * a_g + loss_b * a_b
+return loss, loss_g, loss_b

def evaluate(self):
# compute loss
-loss, loss_b, loss_g = self.Loss(self.x_train, self.y_train, self.a_g, self.a_b)
-loss_b_numpy = float(loss_b)
+loss, loss_g, loss_b = self.Loss(self.x_train, self.y_train, self.a_g, self.a_b)
loss_g_numpy = float(loss_g)
-# boundary loss
-self.loss_b.append(loss_b_numpy)
+loss_b_numpy = float(loss_b)
# eq loss
self.loss_g.append(loss_g_numpy)
+# boundary loss
+self.loss_b.append(loss_b_numpy)
if self.iter % 200 == 0:
self.a_g, self.a_b, _ = self.pirbn.cal_ntk(self.x_train)
print("\ta_g =", float(self.a_g), "\ta_b =", float(self.a_b))
print("Iter: ", self.iter, "\tL1 =", loss_b_numpy, "\tL2 =", loss_g_numpy)
print("Iter: ", self.iter, "\tL1 =", loss_g_numpy, "\tL2 =", loss_b_numpy)
self.iter = self.iter + 1
return loss
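
Taken together, the train.py changes make the names match what the tensors hold: the squared `tmp[0] - y[0]` term is the governing-equation loss weighted by `a_g`, the squared `tmp[1]` term is the boundary loss weighted by `a_b`, and `evaluate` unpacks and logs them in the order that `Loss` returns them. A minimal sketch of the corrected pairing, with hypothetical stand-in residuals in place of the network output:

```python
import paddle

# Hypothetical stand-ins: eq_residual plays the role of tmp[0] - y[0],
# bc_residual the role of tmp[1]; both weights default to 1.0 as in __init__.
eq_residual = paddle.to_tensor([0.10, -0.20, 0.05])
bc_residual = paddle.to_tensor([0.01, -0.03])
a_g = paddle.to_tensor(1.0)  # governing-equation weight
a_b = paddle.to_tensor(1.0)  # boundary weight

loss_g = 0.5 * paddle.mean(paddle.square(eq_residual))  # eq loss
loss_b = 0.5 * paddle.mean(paddle.square(bc_residual))  # boundary loss
loss = loss_g * a_g + loss_b * a_b

# The caller must unpack in the new return order: loss, loss_g, loss_b.
print("L1 =", float(loss_g), "\tL2 =", float(loss_b), "\ttotal =", float(loss))
```
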

