diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index de1f40a..19a1066 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -15,6 +15,8 @@ jobs:
     runs-on: ubuntu-latest
     container:
       image: firedrakeproject/firedrake-vanilla-default:latest
+      env:
+        FUSE_CI: 1
     # Steps represent a sequence of tasks that will be executed as
     # part of the jobs
     steps:
diff --git a/Makefile b/Makefile
index b09392e..01b7807 100644
--- a/Makefile
+++ b/Makefile
@@ -36,4 +36,5 @@ test_cells:
 	@firedrake-clean
 	@python3 -m pytest -rPx --run-cleared test/test_cells.py::test_ref_els[expect1]
 
-prepush: lint tests doc
\ No newline at end of file
+prepush: lint tests doc
+	@rm .coverage.*
\ No newline at end of file
diff --git a/conftest.py b/conftest.py
index c8c4e00..c3c2145 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,3 +1,4 @@
+import os
 import pytest
 
 def pytest_addoption(parser):
@@ -6,4 +7,42 @@ def pytest_addoption(parser):
         action="store_true",
         default=False,
         help="Run tests that require a cleared cache",
-    )
\ No newline at end of file
+    )
+
+def _skip_test_dependency(dependency):
+    """
+    Returns whether to skip tests with a certain dependency.
+
+    Usually, this will return True if the dependency is not available.
+    However, on CI we never want to skip tests because we should
+    test all functionality there, so if the environment variable
+    FUSE_CI=1 then this will always return False.
+    """
+    skip = True
+
+    if os.getenv("FUSE_CI") == "1":
+        return not skip
+
+    if dependency == "basix":
+        try:
+            import basix  # noqa: F401
+            del basix
+            return not skip
+        except ImportError:
+            return skip
+
+dependency_skip_markers_and_reasons = (
+    ("basix", "skipbasix", "Basix not installed"),)
+
+def pytest_configure(config):
+    """Register an additional marker."""
+    config.addinivalue_line(
+        "markers",
+        "skipbasix: mark as skipped unless Basix is installed"
+    )
+
+def pytest_collection_modifyitems(session, config, items):
+    for item in items:
+        for dep, marker, reason in dependency_skip_markers_and_reasons:
+            if _skip_test_dependency(dep) and item.get_closest_marker(marker) is not None:
+                item.add_marker(pytest.mark.skip(reason))
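The conftest.py hunk above registers the skipbasix marker and, via _skip_test_dependency, skips marked tests when Basix is not importable, unless FUSE_CI=1 forces everything to run. A minimal usage sketch (the test name here is invented; test/test_convert_to_basix.py later in this patch uses the marker the same way):

import pytest

@pytest.mark.skipbasix
def test_requires_basix():
    # Safe to import here: either Basix is installed, or the collection hook
    # above has already skipped this test (except on CI, where FUSE_CI=1
    # turns a missing Basix into a visible failure).
    import basix
    assert basix is not None
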
diff --git a/docs/source/conf.py b/docs/source/conf.py
index cb824a5..a46e43e 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -18,8 +18,8 @@
 
 # -- Project information -----------------------------------------------------
 
-project = 'RedefiningFE'
-copyright = '2023, India Marsden, David A. Ham, Patrick E. Farrell'
+project = 'FUSE'
+copyright = '2025, India Marsden, David A. Ham, Patrick E. Farrell'
 author = 'India Marsden, David A. Ham, Patrick E. Farrell'
 
@@ -65,6 +65,7 @@ intersphinx_mapping = {
     'ufl': ('https://docs.fenicsproject.org/ufl/main/', None),
     'FIAT': ('https://firedrakeproject.org/fiat', None),
+    'basix': ('https://docs.fenicsproject.org/basix/main/python', None),
     'matplotlib': ('https://matplotlib.org/', None),
     'python': ('https://docs.python.org/3/', None),
     'numpy': ('https://numpy.org/doc/stable/', None)}
 
diff --git a/fuse/cells.py b/fuse/cells.py
index 5f2550e..5b31afe 100644
--- a/fuse/cells.py
+++ b/fuse/cells.py
@@ -11,6 +11,7 @@
 from mpl_toolkits.mplot3d import proj3d
 from sympy.combinatorics.named_groups import SymmetricGroup
 from fuse.utils import sympy_to_numpy, fold_reduce
+from fuse.geometry import compute_attachment_1d, compute_attachment_2d, compute_attachment_3d
 from FIAT.reference_element import Simplex, TensorProductCell as FiatTensorProductCell, Hypercube
 from ufl.cell import Cell, TensorProductCell
 
@@ -63,91 +64,6 @@ def make_arrow_3d(ax, mid, edge, direction=1):
     ax.add_artist(a)
 
-
-def construct_attach_2d(a, b, c, d):
-    """
-    Compute polynomial attachment in x based on two points (a,b) and (c,d)
-
-    :param: a,b,c,d: two points (a,b) and (c,d)
-    """
-    x = sp.Symbol("x")
-    return [((c-a)/2)*(x+1) + a, ((d-b)/2)*(x+1) + b]
-
-
-def construct_attach_3d(res):
-    """
-    Convert matrix of coefficients into a vector of polynomials in x and y
-
-    :param: res: matrix of coefficients
-    """
-    x = sp.Symbol("x")
-    y = sp.Symbol("y")
-    xy = sp.Matrix([1, x, y])
-    return (xy.T * res)
-
-
-def compute_scaled_verts(d, n):
-    """
-    Construct default cell vertices
-
-    :param: d: dimension of cell
-    :param: n: number of vertices
-    """
-    if d == 2:
-        source = np.array([0, 1])
-        rot_coords = [source for i in range(0, n)]
-
-        rot_mat = np.array([[np.cos((2*np.pi)/n), -np.sin((2*np.pi)/n)], [np.sin((2*np.pi)/n), np.cos((2*np.pi)/n)]])
-        for i in range(1, n):
-            rot_coords[i] = np.matmul(rot_mat, rot_coords[i-1])
-        xdiff, ydiff = (rot_coords[0][0] - rot_coords[1][0],
-                        rot_coords[0][1] - rot_coords[1][1])
-        scale = 2 / np.sqrt(xdiff**2 + ydiff**2)
-        scaled_coords = np.array([[scale*x, scale*y] for (x, y) in rot_coords])
-        return scaled_coords
-    elif d == 3:
-        if n == 4:
-            A = [-1, 1, -1]
-            B = [1, -1, -1]
-            C = [1, 1, 1]
-            D = [-1, -1, 1]
-            coords = [A, B, C, D]
-            face1 = np.array([A, D, C])
-            face2 = np.array([A, B, D])
-            face3 = np.array([A, C, B])
-            face4 = np.array([B, D, C])
-            faces = [face1, face2, face3, face4]
-        elif n == 8:
-            coords = []
-            faces = [[] for i in range(6)]
-            for i in [-1, 1]:
-                for j in [-1, 1]:
-                    for k in [-1, 1]:
-                        coords.append([i, j, k])
-
-            for j in [-1, 1]:
-                for k in [-1, 1]:
-                    faces[0].append([1, j, k])
-                    faces[1].append([-1, j, k])
-                    faces[2].append([j, 1, k])
-                    faces[3].append([j, -1, k])
-                    faces[4].append([j, k, 1])
-                    faces[5].append([j, k, -1])
-
-        else:
-            raise ValueError("Polyhedron with {} vertices not supported".format(n))
-
-        xdiff, ydiff, zdiff = (coords[0][0] - coords[1][0],
-                               coords[0][1] - coords[1][1],
-                               coords[0][2] - coords[1][2])
-        scale = 2 / np.sqrt(xdiff**2 + ydiff**2 + zdiff**2)
-        scaled_coords = np.array([[scale*x, scale*y, scale*z] for (x, y, z) in coords])
-        scaled_faces = np.array([[[scale*x, scale*y, scale*z] for (x, y, z) in face] for face in faces])
-
-        return scaled_coords, scaled_faces
-    else:
-        raise ValueError("Dimension {} not supported".format(d))
-
 
 def polygon(n):
     """
     Constructs the 2D default cell with n sides/vertices
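The helpers deleted above are not gone: they reappear (with compute_scaled_verts renamed to compute_equilateral_verts) in the new fuse/geometry.py later in this patch. A small relocation sanity check, assuming fuse.geometry imports on its own; the edge-length-2 scaling is what compute_cell_group below continues to rely on:

import numpy as np
from fuse.geometry import compute_equilateral_verts

verts = compute_equilateral_verts(2, 3)          # default equilateral triangle
edge_length = np.linalg.norm(verts[1] - verts[0])
assert np.isclose(edge_length, 2.0)              # vertices are scaled so each edge has length 2
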
@@ -242,7 +158,7 @@ class Point():
 
     id_iter = itertools.count()
 
-    def __init__(self, d, edges=[], vertex_num=None, oriented=False, group=None, edge_orientations=None, cell_id=None):
+    def __init__(self, d, edges=[], vertex_num=None, variant="equilateral", oriented=False, group=None, edge_orientations=None, cell_id=None):
         if not cell_id:
             cell_id = next(self.id_iter)
         self.id = cell_id
@@ -254,7 +170,7 @@ def __init__(self, d, edges=[], vertex_num=None, oriented=False, group=None, edg
         if d == 0:
             assert (edges == [])
         if vertex_num:
-            edges = self.compute_attachments(vertex_num, edges, edge_orientations)
+            edges = self.compute_attachments(vertex_num, edges, edge_orientations, variant)
 
         self.oriented = oriented
         self.G = nx.DiGraph()
@@ -268,11 +184,13 @@ def __init__(self, d, edges=[], vertex_num=None, oriented=False, group=None, edg
 
         self.group = group
         if not group:
+            if variant != "equilateral":
+                raise ValueError("Group discovery only supported for equilateral elements")
             self.group = self.compute_cell_group()
 
         self.group = self.group.add_cell(self)
 
-    def compute_attachments(self, n, points, orientations=None):
+    def compute_attachments(self, n, points, orientations=None, variant="equilateral"):
         """
         Compute the attachment function between two nodes
 
@@ -284,40 +202,17 @@ def compute_attachments(self, n, points, orientations=None):
 
            orientations = {}
        if self.dimension == 1:
-            edges = [Edge(points[0], sp.sympify((-1,))),
-                     Edge(points[1], sp.sympify((1,)))]
+            attachments = compute_attachment_1d(n, variant)
        if self.dimension == 2:
-            coords = compute_scaled_verts(2, n)
-            edges = []
-
-            for i in range(n):
-                a, b = coords[i]
-                c, d = coords[(i + 1) % n]
-
-                if i in orientations.keys():
-                    edges.append(Edge(points[i], construct_attach_2d(a, b, c, d), o=points[i].group.get_member(orientations[i])))
-                else:
-                    edges.append(Edge(points[i], construct_attach_2d(a, b, c, d)))
+            attachments = compute_attachment_2d(n, variant)
        if self.dimension == 3:
-            coords, faces = compute_scaled_verts(3, n)
-            coords_2d = np.c_[np.ones(len(faces[0])), compute_scaled_verts(2, len(faces[0]))]
-            res = []
-            edges = []
-
-            for i in range(len(faces)):
-                res = np.linalg.solve(coords_2d, faces[i])
-
-                res_fn = construct_attach_3d(res)
-                # breakpoint()
-                assert np.allclose(np.array(res_fn.subs({"x": coords_2d[0][1], "y": coords_2d[0][2]})).astype(np.float64), faces[i][0])
-                assert np.allclose(np.array(res_fn.subs({"x": coords_2d[1][1], "y": coords_2d[1][2]})).astype(np.float64), faces[i][1])
-                assert np.allclose(np.array(res_fn.subs({"x": coords_2d[2][1], "y": coords_2d[2][2]})).astype(np.float64), faces[i][2])
-                if i in orientations.keys():
-                    edges.append(Edge(points[i], construct_attach_3d(res), o=points[i].group.get_member(orientations[i])))
-                else:
-                    edges.append(Edge(points[i], construct_attach_3d(res)))
-
-            # breakpoint()
+            attachments = compute_attachment_3d(n, variant)
+        edges = []
+        for i in range(n):
+            if i in orientations.keys():
+                edges.append(Edge(points[i], attachments[i], o=points[i].group.get_member(orientations[i])))
+            else:
+                edges.append(Edge(points[i], attachments[i]))
         return edges
 
    def compute_cell_group(self):
@@ -337,7 +232,9 @@ def compute_cell_group(self):
            reordered = element(verts)
            for edge in edges:
                diff = np.subtract(v_coords[reordered.index(edge[0])], v_coords[reordered.index(edge[1])]).squeeze()
+                edge_len = np.sqrt(np.dot(diff, diff))
+                if not np.allclose(edge_len, 2):
                    accepted_perms.remove(element)
                    break
 
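The variant keyword threaded through Point.__init__ and compute_attachments above changes how vertex coordinates are generated, but group discovery still only works for the default equilateral coordinates. A short sketch of the intended calling pattern, mirroring the UFC cells built in fuse_basix/basix_interface.py below (assuming S2 is importable from fuse, as the rest of this patch does via star imports):

from fuse import Point, S2

# Non-equilateral variants must be given their symmetry group explicitly.
ufc_interval = Point(1, [Point(0), Point(0)], vertex_num=2, variant="ufc", group=S2)

# Omitting the group raises the new ValueError:
# Point(1, [Point(0), Point(0)], vertex_num=2, variant="ufc")
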
@@ -515,13 +412,17 @@ def permute_entities(self, g, d):
         return reordered_entities
 
     def basis_vectors(self, return_coords=True, entity=None):
+        """
+        Compute basis vectors of the cell defined as:
+        t_i = v_{i+1} - v_0
+
+        Orientation comes from ordered_vertices; self.graph() is used to avoid the orientation stored in G.
+        """
         if not entity:
             entity = self
-        entity_levels = [sorted(generation) for generation in nx.topological_generations(entity.G)]
-        self_levels = [sorted(generation) for generation in nx.topological_generations(self.G)]
-        vertices = entity_levels[entity.graph_dim()]
+        self_levels = [sorted(generation) for generation in nx.topological_generations(self.graph())]
+        vertices = entity.ordered_vertices()
         if self.dimension == 0:
-            # return [[]
             raise ValueError("Dimension 0 entities cannot have Basis Vectors")
         top_level_node = self_levels[0][0]
         v_0 = vertices[0]
@@ -569,7 +470,7 @@ def plot(self, show=True, plain=False, ax=None, filename=None):
                 vert_coords += [plotted]
                 if not plain:
                     plt.plot(plotted[0], plotted[1], 'bo')
-                    plt.annotate(node, (plotted[0], plotted[1]))
+                    plt.annotate(self.ordered_vertices().index(node), (plotted[0], plotted[1]))
             elif i == 1:
                 edgevals = np.array([attach(x) for x in xs])
                 if len(edgevals[0]) < 2:
@@ -633,6 +534,7 @@ def attachment(self, source, dst):
         if dst_dim == 0:
             vals = [fold_reduce(attachment) for attachment in attachments]
             assert all(np.isclose(val, vals[0]).all() for val in vals)
+
         else:
             for i in range(dst_dim):
                 vals = [fold_reduce(attachment, *tuple(basis[i].tolist()))
@@ -712,7 +614,7 @@ def __call__(self, *x):
         if hasattr(self.attachment, '__iter__'):
             res = []
             for attach_comp in self.attachment:
-                if len(attach_comp.atoms(sp.Symbol)) == len(x):
+                if len(attach_comp.atoms(sp.Symbol)) <= len(x):
                     res.append(sympy_to_numpy(attach_comp, syms, x))
                 else:
                     res.append(attach_comp.subs({syms[i]: x[i] for i in range(len(x))}))
@@ -797,7 +699,7 @@ class CellComplexToFiatSimplex(Simplex):
     def __init__(self, cell, name=None):
         self.fe_cell = cell
         if name is None:
-            name = "FuseCell"
+            name = generic_cellname(cell)
         self.name = name
 
         # verts = [cell.get_node(v, return_coords=True) for v in cell.ordered_vertices()]
@@ -914,28 +816,7 @@ def __init__(self, cell, name=None):
 
         # TODO work out generic way around the naming issue
         if not name:
-            num_verts = len(cell.vertices())
-            if num_verts == 1:
-                # Point
-                name = "vertex"
-            elif num_verts == 2:
-                # Line
-                name = "interval"
-            elif num_verts == 3:
-                # Triangle
-                name = "triangle"
-            elif num_verts == 4:
-                if cell.dimension == 2:
-                    # quadrilateral
-                    name = "quadrilateral"
-                elif cell.dimension == 3:
-                    # tetrahedron
-                    name = "tetrahedron"
-            elif num_verts == 8:
-                # hexahedron
-                name = "hexahedron"
-            else:
-                raise TypeError("UFL cell conversion undefined for {}".format(str(cell)))
+            name = generic_cellname(cell)
         super(CellComplexToUFL, self).__init__(name)
 
     def to_fiat(self):
@@ -980,3 +861,23 @@ def constructCellComplex(name):
         return TensorProductPoint(*components).to_ufl(name)
     else:
         raise TypeError("Cell complex construction undefined for {}".format(str(name)))
+
+
+def generic_cellname(cell):
+    num_verts = len(cell.vertices())
+    if num_verts == 1:
+        name = "vertex"
+    elif num_verts == 2:
+        name = "interval"
+    elif num_verts == 3:
+        name = "triangle"
+    elif num_verts == 4:
+        if cell.dimension == 2:
+            name = "quadrilateral"
+        elif cell.dimension == 3:
+            name = "tetrahedron"
+    elif num_verts == 8:
+        name = "hexahedron"
+    else:
+        raise TypeError("Generic cell conversion undefined for {}".format(str(cell)))
+    return name
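generic_cellname simply keys off the vertex count (and the dimension for four vertices), so both converters can share it. A hedged usage sketch:

from fuse import polygon
from fuse.cells import generic_cellname

assert generic_cellname(polygon(3)) == "triangle"
assert generic_cellname(polygon(4)) == "quadrilateral"   # 4 vertices in dimension 2
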
diff --git a/fuse/geometry.py b/fuse/geometry.py
new file mode 100644
index 0000000..7e6b20a
--- /dev/null
+++ b/fuse/geometry.py
@@ -0,0 +1,172 @@
+import numpy as np
+import sympy as sp
+import itertools
+
+
+def construct_attach_2d(a, b, c, d):
+    """
+    Compute polynomial attachment in x based on two points (a,b) and (c,d)
+
+    :param: a,b,c,d: two points (a,b) and (c,d)
+    """
+    x = sp.Symbol("x")
+    return [((c-a)/2)*(x+1) + a, ((d-b)/2)*(x+1) + b]
+
+
+def construct_attach_3d(res):
+    """
+    Convert matrix of coefficients into a vector of polynomials in x and y
+
+    :param: res: matrix of coefficients
+    """
+    x = sp.Symbol("x")
+    y = sp.Symbol("y")
+    xy = sp.Matrix([1, x, y])
+    return (xy.T * res)
+
+
+def compute_equilateral_verts(d, n):
+    """
+    Construct default cell vertices
+
+    :param: d: dimension of cell
+    :param: n: number of vertices
+    """
+    if d == 1:
+        return np.array([[-1], [1]])
+    elif d == 2:
+        source = np.array([0, 1])
+        rot_coords = [source for i in range(0, n)]
+
+        rot_mat = np.array([[np.cos((2*np.pi)/n), -np.sin((2*np.pi)/n)], [np.sin((2*np.pi)/n), np.cos((2*np.pi)/n)]])
+        for i in range(1, n):
+            rot_coords[i] = np.matmul(rot_mat, rot_coords[i-1])
+        xdiff, ydiff = (rot_coords[0][0] - rot_coords[1][0],
+                        rot_coords[0][1] - rot_coords[1][1])
+        scale = 2 / np.sqrt(xdiff**2 + ydiff**2)
+        scaled_coords = np.array([[scale*x, scale*y] for (x, y) in rot_coords])
+        return scaled_coords
+    elif d == 3:
+        if n == 4:
+            A = [-1, 1, -1]
+            B = [1, -1, -1]
+            C = [1, 1, 1]
+            D = [-1, -1, 1]
+            coords = [A, B, C, D]
+            face1 = np.array([A, D, C])
+            face2 = np.array([A, B, D])
+            face3 = np.array([A, C, B])
+            face4 = np.array([B, D, C])
+            faces = [face1, face2, face3, face4]
+        elif n == 8:
+            coords = []
+            faces = [[] for i in range(6)]
+            for i in [-1, 1]:
+                for j in [-1, 1]:
+                    for k in [-1, 1]:
+                        coords.append([i, j, k])
+
+            for j in [-1, 1]:
+                for k in [-1, 1]:
+                    faces[0].append([1, j, k])
+                    faces[1].append([-1, j, k])
+                    faces[2].append([j, 1, k])
+                    faces[3].append([j, -1, k])
+                    faces[4].append([j, k, 1])
+                    faces[5].append([j, k, -1])
+
+        else:
+            raise ValueError("Polyhedron with {} vertices not supported".format(n))
+
+        xdiff, ydiff, zdiff = (coords[0][0] - coords[1][0],
+                               coords[0][1] - coords[1][1],
+                               coords[0][2] - coords[1][2])
+        scale = 2 / np.sqrt(xdiff**2 + ydiff**2 + zdiff**2)
+        scaled_coords = np.array([[scale*x, scale*y, scale*z] for (x, y, z) in coords])
+        scaled_faces = np.array([[[scale*x, scale*y, scale*z] for (x, y, z) in face] for face in faces])
+
+        return scaled_coords, scaled_faces
+    else:
+        raise ValueError("Dimension {} not supported".format(d))
+
+
+def compute_ufc_verts(d, n):
+    """
+    Construct UFC cell vertices
+
+    :param: d: dimension of cell
+    :param: n: number of vertices
+
+    Sorts the list for a consistent ordering.
+    """
+    if d + 1 == n:
+        values = [1] + [0 for _ in range(d)]
+    elif 2**d == n:
+        values = [1 for _ in range(d)] + [0 for _ in range(d)]
+    else:
+        raise NotImplementedError(f"Not able to construct cell in {d} dimensions with {n} vertices.")
+
+    # remove duplicates, sort starting with first element, convert to np float
+    vertices = np.array(sorted(list(set(itertools.permutations(values, d))), key=lambda x: x[::-1]), dtype=np.float64)
+
+    if d == 3:
+        raise NotImplementedError("Need to construct UFC faces in 3D")
+        if n == 4:
+            faces = set(itertools.permutations(vertices, 3))
+            print(faces)
+        elif n == 8:
+            faces = set(itertools.permutations(vertices, 4))
+        return vertices
+    return vertices
+
+
+coord_variants = {"equilateral": compute_equilateral_verts,
+                  "ufc": compute_ufc_verts}
+
+
+def compute_attachment_1d(n, variant="equilateral"):
+    """
+    Constructs sympy functions for attaching the n vertices of a cell,
+    with coordinates determined by variant.
+
+    :param: n: number of vertices
+    :param: variant: vertex type"""
+    coords = coord_variants[variant](1, n)
+    return [sp.sympify(tuple(c)) for c in coords]
+
+
+def compute_attachment_2d(n, variant="equilateral"):
+    """
+    Constructs sympy functions for attaching the edges of a polygon
+    with n vertices, with coordinates determined by variant.
+
+    :param: n: number of vertices
+    :param: variant: vertex type"""
+    coords = coord_variants[variant](2, n)
+    attachments = []
+
+    for i in range(n):
+        a, b = coords[i]
+        c, d = coords[(i + 1) % n]
+
+        attachments.append(construct_attach_2d(a, b, c, d))
+    return attachments
+
+
+def compute_attachment_3d(n, variant="equilateral"):
+    coords, faces = compute_equilateral_verts(3, n)
+    coords_2d = np.c_[np.ones(len(faces[0])), compute_equilateral_verts(2, len(faces[0]))]
+    res = []
+    attachments = []
+
+    for i in range(len(faces)):
+        res = np.linalg.solve(coords_2d, faces[i])
+
+        res_fn = construct_attach_3d(res)
+
+        assert np.allclose(np.array(res_fn.subs({"x": coords_2d[0][1], "y": coords_2d[0][2]})).astype(np.float64), faces[i][0])
+        assert np.allclose(np.array(res_fn.subs({"x": coords_2d[1][1], "y": coords_2d[1][2]})).astype(np.float64), faces[i][1])
+        assert np.allclose(np.array(res_fn.subs({"x": coords_2d[2][1], "y": coords_2d[2][2]})).astype(np.float64), faces[i][2])
+
+        attachments.append(construct_attach_3d(res))
+    return attachments
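If I read the sorting in compute_ufc_verts correctly (deduplicated permutations ordered by reversed coordinates), the "ufc" variant reproduces the usual UFC reference orderings, while the default variant keeps the equilateral, edge-length-2 cells. A rough illustration; the exact arrays are my expectation, not output quoted from the test suite:

import numpy as np
from fuse.geometry import compute_ufc_verts

assert np.allclose(compute_ufc_verts(2, 3), [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])               # UFC triangle
assert np.allclose(compute_ufc_verts(2, 4), [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])   # UFC quadrilateral
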
diff --git a/fuse/triples.py b/fuse/triples.py
index f977b2a..26e4f5a 100644
--- a/fuse/triples.py
+++ b/fuse/triples.py
@@ -99,7 +99,7 @@ def to_ufl(self):
     def to_fiat(self):
         ref_el = self.cell.to_fiat()
         dofs = self.generate()
-        degree = self.spaces[0].degree()
+        degree = self.degree()
         entity_ids = {}
         entity_perms = {}
         nodes = []
@@ -139,6 +139,13 @@ def to_fiat(self):
         dual = DualSet(nodes, ref_el, entity_ids)
         return CiarletElement(poly_set, dual, degree, form_degree)
 
+    def to_basix(self):
+        try:
+            from fuse_basix.basix_interface import convert_to_basix_element
+            return convert_to_basix_element(self)
+        except ImportError:
+            raise ImportError("Basix needs to be installed to convert FUSE to Basix")
+
     def plot(self, filename="temp.png"):
         # point evaluation nodes only
         dofs = self.generate()
diff --git a/fuse_basix/__init__.py b/fuse_basix/__init__.py
new file mode 100644
index 0000000..9c6b6c1
--- /dev/null
+++ b/fuse_basix/__init__.py
@@ -0,0 +1 @@
+from fuse_basix.basix_interface import convert_to_basix_element
\ No newline at end of file
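ElementTriple.to_basix() above defers to the new fuse_basix package, so a triple can be handed straight to Basix's tabulation API. A rough round-trip sketch, assuming Basix is installed and reusing the create_cg1 helper defined in test/test_convert_to_basix.py at the end of this patch:

import numpy as np
from fuse import Point
from test_convert_to_basix import create_cg1   # helper from the new test file

cell = Point(1, [Point(0), Point(0)], vertex_num=2)
element = create_cg1(cell).to_basix()          # raises ImportError if fuse_basix/Basix is missing
table = element.tabulate(0, np.array([[0.0], [0.5], [1.0]]))
print(table.shape)                             # (derivatives, points, dofs, value size)
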
diff --git a/fuse_basix/basix_interface.py b/fuse_basix/basix_interface.py
new file mode 100644
index 0000000..126851f
--- /dev/null
+++ b/fuse_basix/basix_interface.py
@@ -0,0 +1,144 @@
+from fuse import *
+import numpy as np
+from basix import MapType, SobolevSpace, create_custom_element, PolysetType, CellType
+
+
+basix_elements = {"interval": CellType.interval,
+                  "triangle": CellType.triangle}
+
+"""Fuse doesn't support L2Piola, doubleContravariantPiola, doubleCovariantPiola"""
+basix_mapping = {"HCurl": MapType.covariantPiola,
+                 "HDiv": MapType.contravariantPiola,
+                 "H1": MapType.identity,
+                 "L2": MapType.identity,
+                 }
+
+"""Fuse doesn't yet support H3, HDivDiv, HEin, HInf"""
+basix_sobolev = {"L2": SobolevSpace.L2,
+                 "H1": SobolevSpace.H1,
+                 "HDiv": SobolevSpace.HDiv,
+                 "HCurl": SobolevSpace.HCurl,
+                 "H2": SobolevSpace.H2}
+
+def compute_points_and_matrices(triple, ref_el):
+    dofs = triple.generate()
+    degree = triple.degree()
+
+    spdim = ref_el.get_spatial_dimension()
+    top = ref_el.get_topology()
+
+    min_ids = triple.cell.get_starter_ids()
+    value_shape = triple.get_value_shape()
+    if len(value_shape) > 1:
+        value_size = value_shape[0]
+    else:
+        value_size = 1
+    xs = [[np.zeros((0, spdim)) for entity in top[dim]] for dim in sorted(top)]
+    Ms = [[None for entity in top[dim]] for dim in sorted(top)]
+
+    entities = [(dim, entity) for dim in sorted(top) for entity in top[dim]]
+    for entity in entities:
+        dof_added = False
+        dim = entity[0]
+        for i in range(len(dofs)):
+            if entity[1] == dofs[i].trace_entity.id - min_ids[dim]:
+                dof_added = True
+                converted = dofs[i].convert_to_fiat(ref_el, degree)
+                pt_dict = converted.pt_dict
+                dof_keys = list((pt_dict.keys()))
+                dof_M = []
+                for d in dof_keys:
+                    wts = []
+                    for pt in pt_dict[d]:
+                        if len(value_shape) > 1:
+                            wt = np.zeros_like(value_shape)
+                            wt[pt[1]] = pt[0]
+                        else:
+                            wt = [pt[0]]
+                        wts.append(wt)
+                    dof_M.append(wts)
+                derivs = converted.max_deriv_order
+                if derivs == 0:
+                    M = np.array([dof_M])
+                else:
+                    raise NotImplementedError("Derivatives need adding")
+                xs[dim][entity[1]] = np.r_[xs[dim][entity[1]], np.array(transform_to_basix_cell(triple.cell, dof_keys))]
+
+                if Ms[dim][entity[1]] is None:
+                    Ms[dim][entity[1]] = M
+                else:
+                    Ms[dim][entity[1]] = np.r_[Ms[dim][entity[1]], M]
+
+        if not dof_added:
+            Ms[dim][entity[1]] = np.zeros((0, value_size, 0, 1))
+
+
+    # remove when basix does this for me
+    if len(xs) < 4:
+        for _ in range(4 - len(xs)):
+            xs.append([])
+    if len(Ms) < 4:
+        for _ in range(4 - len(Ms)):
+            Ms.append([])
+
+    return xs, Ms
+
+
+def convert_to_basix_element(triple):
+    ref_el = triple.cell.to_fiat()
+    polyset = triple.spaces[0].to_ON_polynomial_set(ref_el)
+    value_shape = list(triple.get_value_shape())
+    x, M = compute_points_and_matrices(triple, ref_el)
+    return create_custom_element(basix_elements[ref_el.cellname()],
+                                 value_shape,
+                                 polyset.coeffs,
+                                 x,
+                                 M,
+                                 0,  # skip derivative for now
+                                 basix_mapping[str(triple.spaces[1])],
+                                 basix_sobolev[str(triple.spaces[1])],
+                                 # if no dofs defined on boundary entities it is discontinuous
+                                 sum(len(x[i]) for i in range(triple.cell.get_spatial_dimension())) == 0,
+                                 triple.spaces[0].contains,
+                                 triple.spaces[0].maxdegree,
+                                 PolysetType.standard,  # don't yet support macro elements
+                                 )
+
+
+def ufc_triangle():
+    vertices = []
+    for i in range(3):
+        vertices.append(Point(0))
+    edges = []
+
+    edges.append(Point(1, [vertices[0], vertices[1]], vertex_num=2))
+    edges.append(Point(1, [vertices[1], vertices[2]], vertex_num=2))
+    edges.append(Point(1, [vertices[0], vertices[2]], vertex_num=2))
+
+    tri = Point(2, edges, vertex_num=3, variant="ufc", group=S1, edge_orientations={2: [1, 0]})
+    return tri
+
+
+basix_elements_fuserep = {"interval": Point(1, [Point(0), Point(0)], vertex_num=2, variant="ufc", group=S2),
+                          "triangle": ufc_triangle()}
+
+
+def transform_points(cell_a, cell_b, points):
+    v_a = np.array(cell_a.vertices(return_coords=True))
+    v_b = np.array(cell_b.vertices(return_coords=True))
+
+    A = np.r_[v_a.T, np.ones((1, len(v_a)))]
+    B = np.r_[v_b.T, np.ones((1, len(v_b)))]
+    transform = B @ np.linalg.inv(A)
+
+    res = (transform @ np.r_[np.array(points).T, np.ones((1, len(points)))])[:-1]
+    res = [tuple(res.T[i]) for i in range(res.shape[1])]
+    return res
+
+def transform_to_basix_cell(fuse_cell, x):
+    try:
+        basix_cell = basix_elements_fuserep[fuse_cell.to_ufl().cellname()]
+    except KeyError:
+        raise NotImplementedError(f"Fuse cell {fuse_cell.to_ufl().cellname()} doesn't have a Basix equivalent")
+
+    return transform_points(fuse_cell, basix_cell, x)
\ No newline at end of file
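transform_points is an affine change of coordinates written with homogeneous coordinates, so by construction it maps the source cell's vertices exactly onto the target cell's vertices. A worked check, assuming vertices(return_coords=True) returns plain coordinate tuples in a consistent order (this mirrors the print-based test_cells test later in this patch):

import numpy as np
from fuse import polygon
from fuse_basix.basix_interface import ufc_triangle, transform_points

src = polygon(3)          # FUSE equilateral triangle
dst = ufc_triangle()      # UFC-style triangle used on the Basix side
mapped = transform_points(src, dst, src.vertices(return_coords=True))
assert np.allclose(mapped, dst.vertices(return_coords=True))
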
diff --git a/pyproject.toml b/pyproject.toml
index 5b801c1..95e48f0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,7 +25,7 @@ dependencies = [
 ]
 
 [project.urls]
-Repository = "https://github.com/indiamai/fuse.git"
+Repository = "https://github.com/firedrakeproject/fuse.git"
 
 [project.optional-dependencies]
 doc = [
@@ -35,12 +35,15 @@ dev = [
     "pytest",
     "coverage",
     "flake8",
+    "fenics-basix",
 ]
 
+[tool.setuptools]
+packages = ["fuse", "fuse_basix"]
+
 [tool.pytest.ini_options]
 testpaths = [
-    "tests",
+    "test",
 ]
 
 [tool.coverage.run]
diff --git a/test/test_cells.py b/test/test_cells.py
index 900828d..88e18eb 100644
--- a/test/test_cells.py
+++ b/test/test_cells.py
@@ -37,7 +37,6 @@ def test_basis_vectors(C):
 
 def test_orientation():
     cell = Point(1, [Point(0), Point(0)], vertex_num=2)
-    print(cell.get_topology())
     for g in cell.group.members():
         if not g.perm.is_Identity:
             oriented = cell.orient(g)
diff --git a/test/test_convert_to_basix.py b/test/test_convert_to_basix.py
new file mode 100644
index 0000000..0439efc
--- /dev/null
+++ b/test/test_convert_to_basix.py
@@ -0,0 +1,115 @@
+import pytest
+from fuse import *
+import numpy as np
+from test_convert_to_fiat import create_dg1, create_cg2, create_cg2_tri
+from test_2d_examples_docs import construct_cg3
+
+
+def create_cg1(cell):
+    deg = 1
+    vert_dg = create_dg1(cell.vertices()[0])
+    xs = [immerse(cell, vert_dg, TrH1)]
+
+    Pk = PolynomialSpace(deg)
+    cg = ElementTriple(cell, (Pk, CellL2, C0), DOFGenerator(xs, get_cyc_group(len(cell.vertices())), S1))
+    return cg
+
+
+@pytest.mark.skipbasix
+@pytest.mark.parametrize("elem_gen,deg", [(create_cg1, 1),
+                                          (create_cg2, 2)])
+def test_basix_conversion_interval(elem_gen, deg):
+    import basix
+    cell = Point(1, [Point(0), Point(0)], vertex_num=2)
+    cg = elem_gen(cell)
+    element = cg.to_basix()
+
+    points = np.array([[0.0], [1.0], [0.5], [-1.0]])
+    fuse_to_basix = element.tabulate(0, points)
+    lagrange = basix.create_element(
+        basix.ElementFamily.P, basix.CellType.interval, deg, basix.LagrangeVariant.equispaced
+    )
+    basix_pts = lagrange.tabulate(0, points)
+    assert np.allclose(fuse_to_basix, basix_pts)
+
+
+@pytest.mark.skipbasix
+@pytest.mark.parametrize("elem_gen,deg", [(create_cg1, 1),
+                                          (create_cg2_tri, 2),
+                                          pytest.param(construct_cg3, 3, marks=pytest.mark.xfail(reason='M needs work'))])
+def test_basix_conversion(elem_gen, deg):
+    import basix
+    cell = polygon(3)
+    cg = elem_gen(cell)
+    element = cg.to_basix()
+
+    points = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [0.5, 0.5]])
+    fuse_to_basix = element.tabulate(0, points)
+
+    lagrange = basix.create_element(
+        basix.ElementFamily.P, basix.CellType.triangle, deg, basix.LagrangeVariant.equispaced
+    )
+    basix_pts = lagrange.tabulate(0, points)
+    assert np.allclose(fuse_to_basix, basix_pts)
+
+
+@pytest.mark.skipbasix
+def test_cells():
+    import basix
+    from basix import CellType
+    from fuse_basix.basix_interface import ufc_triangle, transform_points
+
+    print(basix.cell.geometry(CellType.triangle))
+    print(basix.cell.topology(CellType.triangle))
+
+    tri_fuse = polygon(3)
+    tri_basix = ufc_triangle()
+
+    print("RIGHT", tri_basix)
+    # print([tri_basix.get_node(i, return_coords=True) for i in tri_basix.ordered_vertices()])
+    print(tri_basix.get_topology())
+    print(tri_basix.basis_vectors())
+    # tri_basix.plot(filename="basix2.png")
+
+    print("FUSE", tri_fuse)
+    # print([tri_fuse.get_node(i, return_coords=True) for i in tri_fuse.ordered_vertices()])
+    print(tri_fuse.get_topology())
+    print(tri_fuse.basis_vectors())
+    # tri_fuse.plot(filename="fuse.png")
+    print(tri_fuse.vertices(return_coords=True))
+    print(transform_points(tri_fuse, tri_basix, tri_fuse.vertices(return_coords=True)))
+
+# def test_del():
+#     M = [[], [], [], []]
+#     for _ in range(4):
+#         M[0].append(np.array([[[[1.0]]]]))
+#         M[2].append(np.array([[[[1.0]]]]))
+
+#     # There are no DOFs associates with the edges for this element, so we add an empty
+#     # matrix for each edge.
+
+#     for _ in range(4):
+#         M[1].append(np.zeros((0, 1, 0, 1)))
+#     print(M)
+
+#     x = [[], [], [], []]
+#     x[0].append(np.array([[0.0, 0.0]]))
+#     x[0].append(np.array([[1.0, 0.0]]))
+#     x[0].append(np.array([[0.0, 1.0]]))
+
+#     for _ in range(3):
+#         x[1].append(np.zeros((0, 2)))
+#         x[2].append(np.zeros((0, 2)))
+
+#     M = [[], [], [], []]
+#     for _ in range(3):
+#         M[0].append(np.array([[[1.0]]]))
+
+#     for _ in range(3):
+#         M[1].append(np.zeros((0, 1, 0)))
+#         M[2].append(np.zeros((0, 1, 0, 1)))
+#     print(M)
+#     # cell = polygon(3)
+#     # cg = create_cg1(cell)
+#     # x, M = cg.to_basix()
+#     # print(M)