Skip to content

Commit

Permalink
Merge pull request #36 from adtzlr/add-materialtensor
Browse files Browse the repository at this point in the history
Add MaterialTensor
  • Loading branch information
adtzlr authored Nov 9, 2021
2 parents 0f21ee7 + aca19ad commit 6f3092e
Show file tree
Hide file tree
Showing 6 changed files with 145 additions and 5 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ b) the slope of stress vs. stretch and
c) the sign of the resulting stretch from a small superposed force in one direction.

## Hints and usage in FEM modules
Please have a look at [casADi's documentation](https://web.casadi.org/). It is very powerful but unfortunately does not support all the Python stuff you would expect. For example Python's default if-else-statements can't be used in combination with symbolic conditions (use `math.if_else(cond, if_true, if_false)` instead).
For tensor-valued material definitions use `MaterialTensor` (e.g. any stress-strain relation). Please have a look at [CasADi's documentation](https://web.casadi.org/). It is very powerful but unfortunately does not support all the Python stuff you would expect. For example Python's default if-else-statements can't be used in combination with symbolic conditions (use `math.if_else(cond, if_true, if_false)` instead).

Simple examples for using `matadi` with [`scikit-fem`](https://github.com/adtzlr/matadi/discussions/14#) as well as with [`felupe`](https://github.com/adtzlr/matadi/discussions/22) are shown in the Discussion section.

Expand Down
2 changes: 1 addition & 1 deletion matadi/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

Variable = casadi.SX.sym

from ._material import Material
from ._material import Material, Material as MaterialScalar, MaterialTensor
from . import models
from . import math
from ._templates import ThreeFieldVariation, MaterialHyperelastic
Expand Down
57 changes: 57 additions & 0 deletions matadi/_material.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,3 +89,60 @@ def hessian(self, x, threads=1):
fun_shape=self._idx_hessian,
threads=threads,
)


class MaterialTensor:
    """A tensor-valued material definition.

    Wraps a symbolic, tensor-valued function ``fun(x, *args, **kwargs)`` of a
    list of symbolic variables ``x`` and provides vectorized numeric
    evaluation of the function and of its gradient (jacobian) w.r.t. each
    variable.

    Parameters
    ----------
    x : list
        List of symbolic (CasADi) variables.
    fun : callable
        Tensor-valued function of ``x``; called as ``fun(x, *args, **kwargs)``.
    args : tuple, optional
        Extra positional arguments passed to ``fun`` (default is ()).
    kwargs : dict or None, optional
        Extra keyword arguments passed to ``fun`` (default is None, treated
        as an empty dict).
    compress : bool, optional
        If True, squeeze trivial ``(1, 1)`` variable shapes to scalars in the
        evaluated output shapes (default is False).
    """

    def __init__(self, x, fun, args=(), kwargs=None, compress=False):

        self.x = x
        self._fun = fun

        self.args = args
        # NOTE: avoid a mutable default argument ({}) — a shared dict would
        # leak state between instances.
        self.kwargs = {} if kwargs is None else kwargs

        # symbolic function value and its jacobian w.r.t. each variable
        self._f = self._fun(self.x, *self.args, **self.kwargs)
        self._g = [ca.jacobian(self._f, y) for y in self.x]

        # alias: for a tensor-valued function the "gradient" is its jacobian
        self.jacobian = self.gradient

        # generate CasADi function objects for numeric evaluation
        self._function = ca.Function("f", self.x, [self._f])
        self._gradient = ca.Function("g", self.x, self._g)

        # per-variable output shapes of the function evaluation
        self._idx_function = [y.shape for y in x]
        self._idx_gradient = []

        if compress:
            # squeeze trivial (1, 1) shapes (scalar variables) to ()
            for i in range(len(self._idx_function)):
                if np.all(np.array(self._idx_function[i]) == 1):
                    self._idx_function[i] = ()

        # gradient shapes: cartesian product of all function shapes
        for a in self._idx_function:
            for b in self._idx_function:
                self._idx_gradient.append((*a, *b))

    def function(self, x, threads=1):
        "Return the evaluated function as a list (one entry per variable)."
        return apply(
            x,
            fun=self._function,
            x_shape=self._idx_function,
            fun_shape=self._idx_function,
            threads=threads,
        )

    def gradient(self, x, threads=1):
        "Return the list of evaluated gradients (jacobians)."
        return apply(
            x,
            fun=self._gradient,
            x_shape=self._idx_function,
            fun_shape=self._idx_gradient,
            threads=threads,
        )
5 changes: 5 additions & 0 deletions matadi/math.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,3 +61,8 @@ def eigvals(T, eps=1e-5):
wT = eig_symbolic(T)

return wT


def cof(T):
    "Return the cofactor matrix of T, i.e. det(T) times the inverse transpose."

    d = det(T)
    return d * transpose(inv(T))
33 changes: 31 additions & 2 deletions tests/test_eigvals_grad.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import numpy as np

from matadi import Variable, Material
from matadi.math import transpose, eigvals, sum1, trace
from matadi import Variable, Material, MaterialTensor
from matadi.math import transpose, eigvals, sum1, trace, cof, inv, det, SX


def fun(x):
Expand Down Expand Up @@ -81,6 +81,35 @@ def test_eigvals_single():
assert np.allclose(DW[0], Eye4)


def test_cof():
    "Test the cofactor helper via MaterialTensor evaluation."

    # symbolic variable
    F = Variable("F", 3, 3)

    # data: a single diagonal deformation gradient, 2.4 * eye(3)
    FF = np.diag(2.4 * np.ones(3)).reshape(3, 3, 1, 1)

    # tensor-valued function returning the cofactor matrix of F
    def g(x):
        F = x[0]
        return cof(F)

    # init Materials: one evaluating cof(F), one returning F itself
    # (the original test discarded the cof-material immediately, so cof
    #  was never actually evaluated)
    C = MaterialTensor(x=[F], fun=g)
    W = MaterialTensor(x=[F], fun=lambda x: x[0])

    Eye = np.eye(3)
    Eye4 = np.einsum("ij,kl->ikjl", Eye, Eye)

    # cof(a * eye(3)) = a ** 2 * eye(3)
    CC = C.function([FF])
    assert np.allclose(CC[0][:, :, 0, 0], 2.4 ** 2 * Eye)

    WW = W.function([FF])
    dW = W.gradient([FF])
    DW = W.jacobian([FF])

    # the identity material returns its input unchanged
    assert np.allclose(WW[0][:, :, 0, 0], 2.4 * Eye)

    # jacobian is an alias of gradient; d(F)/d(F) is the 4th-order identity
    assert np.allclose(dW, DW)
    assert np.allclose(dW[0][:, :, :, :, 0, 0], Eye4)


# allow running the tests directly as a script (without pytest)
if __name__ == "__main__":
    test_eigvals()
    test_eigvals_single()
    test_cof()
51 changes: 50 additions & 1 deletion tests/test_simple.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import numpy as np

from matadi import Variable, Material
from matadi import Variable, Material, MaterialTensor
from matadi.math import det, transpose, trace, invariants, sqrt


Expand Down Expand Up @@ -55,5 +55,54 @@ def test_simple():
assert DW[0].shape == (3, 3, 3, 3, 8, 1000)


def test_tensor():
    "Test function/gradient/jacobian evaluations of MaterialTensor."

    # symbolic variables: a 3x3 tensor and a scalar
    F = Variable("F", 3, 3)
    p = Variable("p", 1, 1)

    # random input data with a shifted diagonal
    FF = np.random.rand(3, 3, 8, 1000)
    for i in range(3):
        FF[i, i] += 1

    # identity material: the function returns its (first) variable
    W = MaterialTensor(x=[F], fun=lambda x: x[0])

    # check output shapes for serial and parallel evaluation;
    # function, gradient and jacobian results are always lists
    for n_threads in (1, 2):
        W0 = W.function([FF], threads=n_threads)
        dW = W.gradient([FF], threads=n_threads)
        DW = W.jacobian([FF], threads=n_threads)

        assert W0[0].shape == (3, 3, 8, 1000)
        assert dW[0].shape == (3, 3, 3, 3, 8, 1000)
        assert DW[0].shape == (3, 3, 3, 3, 8, 1000)

    # scalar variable with compressed output: trivial (1, 1) axes are dropped
    pp = np.random.rand(8, 1000)
    W = MaterialTensor(x=[p], fun=lambda x: x[0], compress=True)
    W0 = W.function([pp], threads=2)

    assert W0[0].shape == (8, 1000)

    # scalar variable without compression keeps the leading (1, 1) axes
    pp = np.random.rand(1, 1, 8, 1000)
    W = MaterialTensor(x=[p], fun=lambda x: x[0])
    W0 = W.function([pp], threads=2)

    assert W0[0].shape == (1, 1, 8, 1000)


# allow running the tests directly as a script (without pytest)
if __name__ == "__main__":
    test_simple()
    test_tensor()

0 comments on commit 6f3092e

Please sign in to comment.