diff --git a/examples/Test_TetMesh.ipynb b/examples/Test_TetMesh.ipynb index e42fb624..7ee13f20 100644 --- a/examples/Test_TetMesh.ipynb +++ b/examples/Test_TetMesh.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Tetrahedral Mesh" + "# Tetrahedral Mesh" ] }, { @@ -20,7 +20,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "First, instead of loading, we define a small tetrahedral mesh represening a cube with a center vertex and twelve tetrahedra." + "First, instead of loading, we define a small tetrahedral mesh representing a cube with a center vertex and twelve tetrahedra." ] }, { diff --git a/examples/Test_TriaMesh_Geodesics.ipynb b/examples/Test_TriaMesh_Geodesics.ipynb index 33482649..005cf1e0 100644 --- a/examples/Test_TriaMesh_Geodesics.ipynb +++ b/examples/Test_TriaMesh_Geodesics.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# TriaMesh Geodesics" + "# TriaMesh Geodesics" ] }, { @@ -352,7 +352,7 @@ "b0=divx\n", " \n", "# solve H x = b0\n", - "# we don't need the B matrix here, as divx is the intgrated divergence \n", + "# we don't need the B matrix here, as divx is the integrated divergence \n", "print(\"Matrix Format now: \"+H.getformat())\n", "if useCholmod:\n", " print(\"Solver: cholesky decomp - performance optimal ...\")\n", @@ -593,7 +593,7 @@ "version": "3" }, "nbsphinx": { - "execute": "always" + "execute": "always" } }, "nbformat": 4, diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 19e9f5b0..664b7581 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -519,7 +519,7 @@ def boundary_loops(self): firstcol = np.nonzero(adj.indptr)[0][0] - 1 loops = [] # loop while we have more first columns: - while not firstcol == []: + while np.size(firstcol) > 0: # start the new loop with this index loop = [firstcol] # delete this entry from matrix (visited) diff --git a/lapy/utils/tests/expected_outcomes.json b/lapy/utils/tests/expected_outcomes.json new file mode 100644 index 00000000..03a68683 --- /dev/null +++ b/lapy/utils/tests/expected_outcomes.json @@ -0,0 +1,155 @@ +{ + "expected_outcomes": { + "test_tria_mesh": { + "expected_euler_value": 2, + "expected_area": [ + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5, + 0.5 + ], + "expected_mesh_area": 5.999999999999998, + "expected_vertex_degrees": [6, 4, 4, 4, 4, 4, 6, 4], + "expected_vertex_area": [ + 1.0, + 0.66666667, + 0.66666667, + 0.66666667, + 0.66666667, + 0.66666667, + 1.0, + 0.66666667 + ], + "expected_edge_length": 1.1380711874576983, + "expected_triangle_normals": [ + [0.0, 0.0, -1.0], + [0.0, -0.0, -1.0], + [0.0, 0.0, -1.0], + [0.0, -0.0, -1.0], + [0.0, 1.0, 0.0], + [0.0, 1.0, 0.0], + [-1.0, 0.0, 0.0], + [-1.0, 0.0, -0.0], + [0.0, 1.0, 0.0], + [0.0, 1.0, 0.0], + [-1.0, 0.0, 0.0], + [-1.0, 0.0, -0.0] + ], + "expected_triangle": [ + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254, + 0.8660254 + ], + "expected_vertices": [0, 1, 2, 3, 4, 5, 6, 7], + "expected_flips": 6, + "expected_result": [ + [-0.57735027, -0.57735027, -0.57735027], + [-0.40824829, 0.81649658, -0.40824829], + [0.40824829, 0.40824829, -0.81649658], + [0.81649658, -0.40824829, -0.40824829], + [-0.40824829, -0.40824829, 0.81649658], + [-0.81649658, 0.40824829, 0.40824829], + [0.57735027, 0.57735027, 0.57735027], + [0.40824829, -0.81649658, 0.40824829] + ], + "expected_result_offset": [ + [-0.57735027, -0.57735027, -0.57735027], + [-0.40824829, 0.81649658, 
-0.40824829], + [0.40824829, 0.40824829, -0.81649658], + [0.81649658, -0.40824829, -0.40824829], + [-0.40824829, -0.40824829, 0.81649658], + [-0.81649658, 0.40824829, 0.40824829], + [0.57735027, 0.57735027, 0.57735027], + [0.40824829, -0.81649658, 0.40824829] + ], + "expected_boundary_loop": [0, 8, 1, 13, 2, 16, 3, 9] + }, + "test_tet_mesh": {"expected_vertices": [0, 1, 2, 3, 4, 5, 6, 7, 8]}, + "test_compute_shapedna": { + "expected_eigenvalues": [-4.0165149e-05, 4.169641, 4.1704664], + "tolerance": 1e-4 + }, + "test_normalize_ev_geometry": { + "expected_normalized_values": [-0.00024099089, 25.017845, 25.022799], + "tolerance": 1e-4 + }, + "test_reweight_ev": { + "expected_reweighted_values": [ + -4.01651487e-05, + 2.08482051e00, + 1.39015547e00 + ], + "tolerance": 1e-4 + }, + "test_compute_distance": {"expected_compute_distance": 0.0}, + "test_compute_shapedna_tet": { + "expected_eigen_values": [8.4440224e-05, 9.8897915e00, 9.8898811e00], + "tolerance": 1e-4 + }, + "test_normalize_ev_geometry_tet": { + "expected_normalized_values": [8.4440224e-05, 9.8897915e00, 9.8898811e00], + "tolerance": 1e-4 + }, + "test_reweight_ev_tet": { + "expected_reweighted_values": [ + 8.44402239e-05, + 4.94489574e00, + 3.29662704e00 + ], + "tolerance": 1e-4 + }, + "test_compute_distance_tet": {"exp_compute_distance": 0.0}, + "test_triangle_mesh_visualization_using_checksum": { + "real_img_checksum": "abaaa757a1e5c1f42a8302fbfe068f49" + }, + "test_TriaMesh_Geodesics": { + "real_img_checksum": "8166f546e6630f017d3ec1f655c0ac85" + }, + "test_Laplace_Geodesics": { + "real_img_checksum": "25f9b10a5d417de3854cf8b8faf77001" + }, + "test_Geodesics_format": { + "expected_matrix_format": "csc", + "max_distance": 0.60497826, + "expected_sqrt_2_div_2": 0.7071067811865476, + "expected_max_abs_diff": 0.0 + }, + "test_TetMesh_Geodesics": { + "expected_evals_len": 10, + "expected_max_col_values": [1.0, 1.0, 1.0], + "expected_min_col_values": [-1.0, -1.0, -1.0], + "expected_matrix_format": "csc", + "max_distance": 0.69931495, + "expected_sqrt": 0.8660254037844386, + "expected_divx": [ + 5.9999948, + 6.0000215, + 6.0000215, + 5.999988, + 6.000053, + 5.999975, + 5.9999676, + 6.000024, + 6.000013, + 6.000008 + ] + } + } +} diff --git a/lapy/utils/tests/test_TetMesh_Geodesics.py b/lapy/utils/tests/test_TetMesh_Geodesics.py new file mode 100644 index 00000000..23bbeaf7 --- /dev/null +++ b/lapy/utils/tests/test_TetMesh_Geodesics.py @@ -0,0 +1,203 @@ +import json + +import numpy as np +import pytest +from scipy.sparse.linalg import splu + +from ...diffgeo import compute_divergence, compute_gradient +from ...heat import diffusion +from ...solver import Solver +from ...tet_mesh import TetMesh + + +# Fixture to load the TetMesh +@pytest.fixture +def load_tet_mesh(): + T = TetMesh.read_vtk("data/cubeTetra.vtk") + return T + + +@pytest.fixture +def loaded_data(): + """ + Load and provide the expected outcomes data from a JSON file. + + Returns: + dict: Dictionary containing the expected outcomes data. 
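+
+    Note: the relative paths used in this module ("data/cubeTetra.vtk" and
+    "lapy/utils/tests/expected_outcomes.json") assume that pytest is invoked
+    from the repository root.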
+ """ + with open("lapy/utils/tests/expected_outcomes.json", "r") as f: + expected_outcomes = json.load(f) + return expected_outcomes + + +# Test if the mesh is oriented +def test_is_oriented(load_tet_mesh): + T = load_tet_mesh + assert not T.is_oriented(), "Mesh is already oriented" + + +# Test orienting the mesh +def test_orient_mesh(load_tet_mesh): + T = load_tet_mesh + T.orient_() + assert T.is_oriented(), "Mesh is not oriented" + + +# Test solving the Laplace eigenvalue problem +def test_solve_eigenvalue_problem(load_tet_mesh): + T = load_tet_mesh + fem = Solver(T, lump=True) + + num_eigenvalues = 10 + evals, evecs = fem.eigs(num_eigenvalues) + + assert len(evals) == num_eigenvalues + assert evecs.shape == (len(T.v), num_eigenvalues) + + +def test_evals_evec_dimension(load_tet_mesh, loaded_data): + T = load_tet_mesh + + expected_evals_len = loaded_data["expected_outcomes"]["test_TetMesh_Geodesics"][ + "expected_evals_len" + ] + + fem = Solver(T, lump=True) + evals, evecs = fem.eigs(expected_evals_len) + assert len(evals) == expected_evals_len + assert np.shape(evecs) == (9261, 10) + + +# Geodesics + + +def test_gradients_normalization_and_divergence(load_tet_mesh, loaded_data): + T = load_tet_mesh + tria = T.boundary_tria() + bvert = np.unique(tria.t) + u = diffusion(T, bvert, m=1) + + # Compute gradients + tfunc = compute_gradient(T, u) + + # Define the expected shape of tfunc (gradient) + expected_tfunc_shape = (48000, 3) + + # Assert that the shape of tfunc matches the expected shape + assert tfunc.shape == expected_tfunc_shape + + # Flip and normalize + X = -tfunc / np.sqrt((tfunc**2).sum(1))[:, np.newaxis] + + # Define the expected shape of X (normalized gradient) + expected_X_shape = (48000, 3) + + # Assert that the shape of X matches the expected shape + assert X.shape == expected_X_shape + + # Load the expected maximum and minimum values for each column of X + expected_max_col_values = loaded_data["expected_outcomes"][ + "test_TetMesh_Geodesics" + ]["expected_max_col_values"] + expected_min_col_values = loaded_data["expected_outcomes"][ + "test_TetMesh_Geodesics" + ]["expected_min_col_values"] + + # Assert maximum and minimum values of each column of X match the expected values + for col in range(X.shape[1]): + assert np.allclose(np.max(X[:, col]), expected_max_col_values[col], atol=1e-6) + assert np.allclose(np.min(X[:, col]), expected_min_col_values[col], atol=1e-6) + + # Compute divergence + divx = compute_divergence(T, X) + + # Define the expected shape of divx (divergence) + expected_divx_shape = (9261,) + + # Assert that the shape of divx matches the expected shape + assert divx.shape == expected_divx_shape + + +def test_tetMesh_Geodesics_format(load_tet_mesh, loaded_data): + """ + Test if matrix format, solver settings, max distance, + and computed values match the expected outcomes. + + Parameters: + - loaded_data (dict): Dictionary containing loaded test data. + + Raises: + - AssertionError: If any test condition is not met. 
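+
+    Note: the body below mirrors the heat-method pipeline from the geodesics
+    notebooks: diffuse heat from the boundary vertices, flip and normalize the
+    gradient, take its divergence, and solve the Poisson system
+    H x = -div(X) with the stiffness matrix H for the distance-like function x.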
+    """
+
+    T = load_tet_mesh
+    tria = T.boundary_tria()
+    bvert = np.unique(tria.t)
+    u = diffusion(T, bvert, m=1)
+
+    # get gradients
+    tfunc = compute_gradient(T, u)
+    # flip and normalize
+    X = -tfunc / np.sqrt((tfunc**2).sum(1))[:, np.newaxis]
+    X = np.nan_to_num(X)
+    # compute divergence
+    divx = compute_divergence(T, X)
+
+    # compute distance
+    useCholmod = True
+    try:
+        from sksparse.cholmod import cholesky
+    except ImportError:
+        useCholmod = False
+
+    fem = Solver(T, lump=True)
+    A, B = fem.stiffness, fem.mass  # computed above when creating Solver
+
+    H = A
+    b0 = -divx
+
+    # solve H x = b0
+    # print("Matrix Format now: "+H.getformat())
+    if useCholmod:
+        print("Solver: cholesky decomp - performance optimal ...")
+        chol = cholesky(H)
+        x = chol(b0)
+    else:
+        print("Solver: spsolve (LU decomp) - performance not optimal ...")
+        lu = splu(H)
+        x = lu.solve(b0)
+
+    x = x - np.min(x)
+
+    # get heat diffusion
+
+    v1func = T.v[:, 0] * T.v[:, 0] + T.v[:, 1] * T.v[:, 1] + T.v[:, 2] * T.v[:, 2]
+    grad = compute_gradient(T, v1func)
+    glength = np.sqrt(np.sum(grad * grad, axis=1))
+    # fcols=glength
+    A, B = fem.stiffness, fem.mass
+    Bi = B.copy()
+    Bi.data **= -1
+    divx2 = Bi * divx
+
+    expected_matrix_format = loaded_data["expected_outcomes"]["test_TetMesh_Geodesics"][
+        "expected_matrix_format"
+    ]
+    assert H.getformat() == expected_matrix_format
+    assert np.shape(x) == (9261,)
+    # Which solver branch runs (cholesky vs. splu) depends on whether
+    # scikit-sparse is installed, so the backend itself is not asserted here.
+    expected_max_x = loaded_data["expected_outcomes"]["test_TetMesh_Geodesics"][
+        "max_distance"
+    ]
+    expected_sqrt_3 = loaded_data["expected_outcomes"]["test_TetMesh_Geodesics"][
+        "expected_sqrt"
+    ]
+    assert np.isclose(max(x), expected_max_x)
+    computed_sqrt_3 = 0.5 * np.sqrt(3.0)
+    assert np.isclose(computed_sqrt_3, expected_sqrt_3)
+    assert np.shape(glength) == (48000,)
+    expected_divx = loaded_data["expected_outcomes"]["test_TetMesh_Geodesics"][
+        "expected_divx"
+    ]
+    assert len(divx2[5000:5010]) == len(expected_divx)
+    assert np.allclose(divx2[5000:5010], expected_divx, atol=1e-4)
diff --git a/lapy/utils/tests/test_shape_DNA.py b/lapy/utils/tests/test_shape_DNA.py
new file mode 100644
index 00000000..fbf98dc1
--- /dev/null
+++ b/lapy/utils/tests/test_shape_DNA.py
@@ -0,0 +1,224 @@
+import json
+
+import numpy as np
+import pytest
+
+from ...shapedna import compute_distance, compute_shapedna, normalize_ev, reweight_ev
+from ...tet_mesh import TetMesh
+from ...tria_mesh import TriaMesh
+
+tria = TriaMesh.read_vtk("data/cubeTria.vtk")
+tet = TetMesh.read_vtk("data/cubeTetra.vtk")
+
+
+@pytest.fixture
+def loaded_data():
+    """
+    Load expected outcomes data from a JSON file as a dictionary.
+    """
+    with open("lapy/utils/tests/expected_outcomes.json", "r") as f:
+        expected_outcomes = json.load(f)
+    return expected_outcomes
+
+
+def test_compute_shapedna(loaded_data):
+    """
+    Test compute_shapedna function for triangular mesh.
+
+    Args:
+        loaded_data (dict): Expected outcomes loaded from a JSON file.
+
+    Raises:
+        AssertionError: If computed eigenvalues don't match expected values within tolerance.
+        AssertionError: If eigenvalues' dtype isn't float32.
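+
+    A minimal sketch of the call being exercised (``tria`` is the module-level
+    cube mesh loaded from "data/cubeTria.vtk"; the result dict stores the
+    requested eigenvalues under the "Eigenvalues" key):
+
+        ev = compute_shapedna(tria, k=3)
+        ev["Eigenvalues"]  # first k eigenvalues, dtype float32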
+ """ + ev = compute_shapedna(tria, k=3) + + expected_Eigenvalues = np.array( + loaded_data["expected_outcomes"]["test_compute_shapedna"][ + "expected_eigenvalues" + ] + ) + tolerance = loaded_data["expected_outcomes"]["test_compute_shapedna"]["tolerance"] + assert np.allclose(ev["Eigenvalues"], expected_Eigenvalues, atol=tolerance) + assert ev["Eigenvalues"].dtype == np.float32 + + +def test_normalize_ev_geometry(loaded_data): + """ + Test normalize_ev() using 'geometry' method for a triangular mesh. + + Args: + loaded_data (dict): Expected outcomes from a JSON file. + + Raises: + AssertionError: If normalized eigenvalues don't match expected values within tolerance. + AssertionError: If normalized eigenvalues' dtype isn't float32. + """ + ev = compute_shapedna(tria, k=3) + + expected_normalized_values = np.array( + loaded_data["expected_outcomes"]["test_normalize_ev_geometry"][ + "expected_normalized_values" + ] + ) + tolerance = loaded_data["expected_outcomes"]["test_normalize_ev_geometry"][ + "tolerance" + ] + normalized_eigenvalues = normalize_ev(tria, ev["Eigenvalues"], method="geometry") + assert np.allclose( + normalized_eigenvalues, expected_normalized_values, atol=tolerance + ) + assert normalized_eigenvalues.dtype == np.float32 + + +def test_reweight_ev(loaded_data): + """ + Test reweighted_ev() and validate reweighted eigenvalues' data type. + + Args: + loaded_data (dict): Expected outcomes from a JSON file. + + Raises: + AssertionError: If reweighted eigenvalues don't match expected values within tolerance. + AssertionError: If reweighted eigenvalues' dtype isn't float32. + """ + ev = compute_shapedna(tria, k=3) + + expected_reweighted_values = np.array( + loaded_data["expected_outcomes"]["test_reweight_ev"][ + "expected_reweighted_values" + ] + ) + tolerance = loaded_data["expected_outcomes"]["test_reweight_ev"]["tolerance"] + reweighted_eigenvalues = reweight_ev(ev["Eigenvalues"]) + tolerance = 1e-4 + assert np.allclose( + reweighted_eigenvalues, expected_reweighted_values, atol=tolerance + ) + + +def test_compute_distance(loaded_data): + """ + Test compute_distance() for eigenvalues and validate the computed distance. + + Args: + loaded_data (dict): Expected outcomes from a JSON file. + + Raises: + AssertionError: If computed distance doesn't match the expected value. + """ + ev = compute_shapedna(tria, k=3) + + expected_compute_distance = loaded_data["expected_outcomes"][ + "test_compute_distance" + ]["expected_compute_distance"] + # compute distance for tria eigenvalues (trivial case) + computed_distance = compute_distance(ev["Eigenvalues"], ev["Eigenvalues"]) + assert computed_distance == expected_compute_distance + + +# Repeating test steps for a tetrahedral mesh + + +def test_compute_shapedna_tet(loaded_data): + """ + Test compute_shapedna for a tetrahedral mesh. + + Args: + loaded_data (dict): Expected outcomes from a JSON file. + + Raises: + AssertionError: If computed eigenvalues don't match expected values within tolerance. + AssertionError: If eigenvalues' dtype isn't float32. 
+ """ + evTet = compute_shapedna(tet, k=3) + + expected_eigen_values = np.array( + loaded_data["expected_outcomes"]["test_compute_shapedna_tet"][ + "expected_eigen_values" + ] + ) + tolerance = loaded_data["expected_outcomes"]["test_compute_shapedna_tet"][ + "tolerance" + ] + evTet = compute_shapedna(tet, k=3) + assert np.allclose(evTet["Eigenvalues"], expected_eigen_values, atol=tolerance) + assert evTet["Eigenvalues"].dtype == np.float32 + + +def test_normalize_ev_geometry_tet(loaded_data): + """ + Test normalize_ev() using 'geometry' method for a tetrahedral mesh. + + Args: + loaded_data (dict): Expected outcomes from a JSON file. + + Raises: + AssertionError: If normalized eigenvalues don't match expected values within tolerance. + AssertionError: If normalized eigenvalues' dtype isn't float32. + """ + evTet = compute_shapedna(tet, k=3) + + expected_normalized_values = np.array( + loaded_data["expected_outcomes"]["test_normalize_ev_geometry_tet"][ + "expected_normalized_values" + ] + ) + tolerance = loaded_data["expected_outcomes"]["test_normalize_ev_geometry_tet"][ + "tolerance" + ] + # volume / surface / geometry normalization of tet eigenvalues + normalized_eigenvalues = normalize_ev(tet, evTet["Eigenvalues"], method="geometry") + + assert np.allclose( + normalized_eigenvalues, expected_normalized_values, atol=tolerance + ) + assert normalized_eigenvalues.dtype == np.float32 + + +def test_reweight_ev_tet(loaded_data): + """ + Test reweighted_ev() for tetrahedral meshes and validate reweighted eigenvalues' data type. + + Args: + loaded_data (dict): Expected outcomes from a JSON file. + + Raises: + AssertionError: If reweighted eigenvalues don't match expected values within tolerance. + """ + evTet = compute_shapedna(tet, k=3) + + expected_reweighted_values = np.array( + loaded_data["expected_outcomes"]["test_reweight_ev_tet"][ + "expected_reweighted_values" + ] + ) + tolerance = loaded_data["expected_outcomes"]["test_reweight_ev_tet"]["tolerance"] + # Linear reweighting of tet eigenvalues + reweighted_eigenvalues = reweight_ev(evTet["Eigenvalues"]) + assert np.allclose( + reweighted_eigenvalues, expected_reweighted_values, atol=tolerance + ) + + +def test_compute_distance_tet(loaded_data): + """ + Test compute_distance() for eigenvalues of tetrahedral meshes and validate computed distance. + + Args: + loaded_data (dict): Expected outcomes from a JSON file. + + Raises: + AssertionError: If computed distance doesn't match the expected value. 
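+
+    This is the trivial self-comparison: the distance between a spectrum and
+    itself is expected to be exactly 0.0, i.e.
+
+        compute_distance(evTet["Eigenvalues"], evTet["Eigenvalues"])  # -> 0.0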
+ """ + evTet = compute_shapedna(tet, k=3) + + # compute distance for tria eigenvalues (trivial case) + computed_distance = compute_distance(evTet["Eigenvalues"], evTet["Eigenvalues"]) + expected_compute_distance = loaded_data["expected_outcomes"][ + "test_compute_distance_tet" + ]["exp_compute_distance"] + + # Compare the computed distance with the expected distance using a tolerance + assert computed_distance == expected_compute_distance diff --git a/lapy/utils/tests/test_tet_mesh.py b/lapy/utils/tests/test_tet_mesh.py new file mode 100644 index 00000000..167193d5 --- /dev/null +++ b/lapy/utils/tests/test_tet_mesh.py @@ -0,0 +1,206 @@ +import json + +import numpy as np +import pytest + +from ...tet_mesh import TetMesh + + +@pytest.fixture +def tet_mesh_fixture(): + points = np.array( + [ + [0, 0, 0], + [1, 0, 0], + [1, 1, 0], + [0, 1, 0], + [0, 0, 1], + [1, 0, 1], + [1, 1, 1], + [0, 1, 1], + [0.5, 0.5, 0.5], + ] + ) + tets = np.array( + [ + [0, 5, 8, 1], + [0, 4, 5, 8], + [2, 5, 6, 8], + [1, 5, 2, 8], + [6, 7, 3, 8], + [6, 3, 2, 8], + [0, 3, 4, 8], + [3, 7, 4, 8], + [0, 1, 2, 8], + [0, 2, 3, 8], + [4, 6, 5, 8], + [4, 7, 6, 8], + ] + ) + + return TetMesh(points, tets) + + +@pytest.fixture +def loaded_data(): + """ + Load expected outcomes data from a JSON file as a dictionary. + """ + with open("lapy/utils/tests/expected_outcomes.json", "r") as f: + expected_outcomes = json.load(f) + return expected_outcomes + + +def test_has_free_vertices(tet_mesh_fixture): + """ + Testing tet mesh has free vertices or not + """ + mesh = tet_mesh_fixture + result = mesh.has_free_vertices() + expected_result = False + assert result == expected_result + + +def test_rm_free_vertices(tet_mesh_fixture, loaded_data): + """ + Testing removing free vertices from tet mesh + """ + mesh = tet_mesh_fixture + updated_vertices, deleted_vertices = mesh.rm_free_vertices_() + expected_vertices = np.array( + loaded_data["expected_outcomes"]["test_tet_mesh"]["expected_vertices"] + ) + expected_removed_vertices = np.array([]) + assert np.array_equal( + updated_vertices, expected_vertices + ), f"{updated_vertices}, {deleted_vertices}" + assert np.array_equal(deleted_vertices, expected_removed_vertices) + + +def test_is_oriented(tet_mesh_fixture): + """ + Testing whether test mesh orientations are consistent + """ + mesh = tet_mesh_fixture + result = mesh.is_oriented() + expected_result = False + assert ( + result == expected_result + ), f"Expected is_oriented result {expected_result}, but got {result}" + + +def test_boundary_trai(tet_mesh_fixture): + """ + Test computation of boundary triangles from tet mesh. + + - `BT.t` represents the array of boundary triangles. + - `.shape[0]` counts the number of boundary triangles. 
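+
+    For this cube fixture each of the 6 cube faces is covered by two boundary
+    triangles, hence 6 * 2 = 12 triangles are expected.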
+    """
+    mesh = tet_mesh_fixture
+    boundary_tria_mesh = mesh.boundary_tria()
+
+    expected_num_triangles = 12
+    assert boundary_tria_mesh.t.shape[0] == expected_num_triangles
+
+    # print(f"Found {boundary_tria_mesh.t.shape[0]} triangles on boundary.")
+
+    # Check that the boundary triangle mesh is not yet oriented consistently
+    result = boundary_tria_mesh.is_oriented()
+    expected_result = False
+    assert (
+        result == expected_result
+    ), f"Expected is_oriented result {expected_result}, but got {result}"
+
+
+def test_avg_edge_length(tet_mesh_fixture):
+    """
+    Testing the computation of average edge length for tetrahedral mesh
+    """
+    mesh = tet_mesh_fixture
+    result = mesh.avg_edge_length()
+
+    expected_avg_edge_length = 1.0543647924813107
+
+    assert np.isclose(result, expected_avg_edge_length)
+
+
+def test_boundary_is_oriented(tet_mesh_fixture):
+    """
+    Test orientation consistency in the boundary of the tetrahedral mesh.
+    """
+    mesh = tet_mesh_fixture
+
+    # Get the boundary triangle mesh
+    boundary_mesh = mesh.boundary_tria()
+
+    # Check if the boundary triangle mesh has consistent orientations
+    result = boundary_mesh.is_oriented()
+
+    expected_result = False
+
+    assert result == expected_result
+
+
+def test_orient_and_check_oriented(tet_mesh_fixture):
+    """
+    Test orienting the tetrahedral mesh for consistency.
+    """
+    mesh = tet_mesh_fixture
+
+    # Correct the orientation of the tetrahedral mesh
+    flipped_tetrahedra = mesh.orient_()
+
+    # Check if the orientations of the tetrahedra are consistent
+    result = mesh.is_oriented()
+
+    expected_flipped_tetrahedra = 1
+    expected_oriented_result = True
+
+    # print(f"{flipped_tetrahedra}")
+
+    assert flipped_tetrahedra == expected_flipped_tetrahedra
+    assert result == expected_oriented_result
+
+
+def test_correct_orientations_and_boundary(tet_mesh_fixture):
+    """
+    Testing orientation correction and checking boundary surface orientation
+    """
+    mesh = tet_mesh_fixture
+
+    # Correct the orientation of the tetrahedral mesh
+    mesh.orient_()
+
+    # Check if the orientations of the tetrahedra are consistent
+    result_oriented = mesh.is_oriented()
+    expected_oriented_result = True
+    assert result_oriented == expected_oriented_result
+
+    # Extract the boundary surface
+    boundary_surface = mesh.boundary_tria()
+    print(f"{boundary_surface}")
+
+    # Check if the orientations of the boundary surface are consistent
+    result_boundary_oriented = boundary_surface.is_oriented()
+    print(f"{result_boundary_oriented}")
+    expected_boundary_oriented_result = True
+    assert result_boundary_oriented == expected_boundary_oriented_result
+
+
+def test_boundary_surface_volume(tet_mesh_fixture):
+    """
+    Testing computation of volume for the boundary surface mesh
+    """
+    mesh = tet_mesh_fixture
+
+    # Correct the orientation of the tetrahedral mesh
+    mesh.orient_()
+
+    # Extract the boundary surface
+    boundary_surface = mesh.boundary_tria()
+
+    # Compute the volume of the boundary surface
+    result_volume = boundary_surface.volume()
+    expected_volume = 1.0
+
+    assert np.isclose(result_volume, expected_volume)
diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py
new file mode 100644
index 00000000..d2b703a9
--- /dev/null
+++ b/lapy/utils/tests/test_tria_mesh.py
@@ -0,0 +1,374 @@
+import json
+
+import numpy as np
+import pytest
+
+from ...tria_mesh import TriaMesh
+
+
+@pytest.fixture
+def tria_mesh_fixture():
+    """
+    A fixture passes parameters to the test functions only once,
+    so that they do not have to be constructed in each test case.
+    """
+    points = np.array(
+ [ + [0.0, 0.0, 0.0], + [0, 1, 0], + [1, 1, 0], + [1, 0, 0], + [0, 0, 1], + [0, 1, 1], + [1, 1, 1], + [1, 0, 1], + ] + ) + trias = np.array( + [ + [0, 1, 2], + [2, 3, 0], + [4, 5, 6], + [6, 7, 4], + [0, 4, 7], + [7, 3, 0], + [0, 4, 5], + [5, 1, 0], + [1, 5, 6], + [6, 2, 1], + [3, 7, 6], + [6, 2, 3], + ] + ) + return TriaMesh(points, trias) + + +@pytest.fixture +def loaded_data(): + """ + Load and provide the expected outcomes data from a JSON file. + + Returns: + dict: Dictionary containing the expected outcomes data. + """ + with open("lapy/utils/tests/expected_outcomes.json", "r") as f: + expected_outcomes = json.load(f) + return expected_outcomes + + +def test_is_closed(tria_mesh_fixture): + """ + testing whether the function is_closed() returns True + """ + + mesh = tria_mesh_fixture + result = mesh.is_closed() + assert result is True + + +def test_is_manifold(tria_mesh_fixture): + """ + Testing whether the function is_manifold() returns 1 + """ + mesh = tria_mesh_fixture + result = mesh.is_manifold() + expected_result = True + assert ( + result == expected_result + ), f"Expected is_manifold result {expected_result}, but got {result}" + + +def test_is_oriented(tria_mesh_fixture): + """ + Testing whether the function is_oriented() returns True + """ + T = tria_mesh_fixture + result = T.is_oriented() + expected_result = False + assert result == expected_result, f"returning {result}" + + +def test_euler(tria_mesh_fixture, loaded_data): + """ + Testing whether the function euler() is equal to 2 + """ + mesh = tria_mesh_fixture + expected_euler_value = loaded_data["expected_outcomes"]["test_tria_mesh"][ + "expected_euler_value" + ] + result = mesh.euler() + assert ( + result == expected_euler_value + ), f"Expected Euler characteristic 2, but got {result}" + + +def test_tria_areas(tria_mesh_fixture, loaded_data): + """ + np.testing.assert_array_almost_equal raises an AssertionError if two objects i.e tria_areas and expected_area + are not equal up to desired precision. + """ + expected_area = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_area"] + ) + + mesh = tria_mesh_fixture + result = mesh.tria_areas() + np.testing.assert_array_almost_equal(result, expected_area) + + +def test_area(tria_mesh_fixture, loaded_data): + """ + Testing whether the function area() return is almost equal to expected area + """ + mesh = tria_mesh_fixture + result = mesh.area() + expected_mesh_area = float( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_mesh_area"] + ) + assert result == pytest.approx(expected_mesh_area) + + +def test_volume(tria_mesh_fixture): + """ + Testing the volume calculation of the mesh for an unoriented mesh + """ + # Assuming that tria_mesh_fixture is unoriented + try: + tria_mesh_fixture.volume() + except ValueError as e: + assert "Can only compute volume for oriented triangle meshes!" in str(e) + else: + assert False # The function should raise a ValueError + + +# Define the test case for non-oriented mesh +def test_volume_oriented(tria_mesh_fixture): + """ + This test is verifying that the T.volume() function raises a ValueError + with the error message when the input TriaMesh object is not correctly oriented. + The test will always pass by matching an error because the volume inside the closed mesh, + however, requires the mesh to be correctly oriented + """ + # Use the appropriate exception that T.volume() raises + with pytest.raises( + ValueError, match="Error: Can only compute volume for oriented triangle meshes!" 
+ ): + tria_mesh_fixture.volume() + + +def test_vertex_degrees(tria_mesh_fixture, loaded_data): + """ + Testing the calculation of vertex degrees + """ + mesh = tria_mesh_fixture + result = mesh.vertex_degrees() + expected_vertex_degrees = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_vertex_degrees"] + ) + np.testing.assert_array_equal(result, expected_vertex_degrees) + + +def test_vertex_areas(tria_mesh_fixture, loaded_data): + """ + Testing the calculation of vertex areas + """ + expected_vertex_area = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_vertex_area"] + ) + mesh = tria_mesh_fixture + result = mesh.vertex_areas() + np.testing.assert_almost_equal(result, expected_vertex_area) + # Verify that the sum of vertex areas is approximately equal to the total surface area + vertex_areas_sum = np.sum(mesh.vertex_areas()) + total_surface_area = mesh.area() + assert np.isclose(vertex_areas_sum, total_surface_area) + + +def test_avg_edge_length(tria_mesh_fixture, loaded_data): + """ + Testing the calculation of average edge length + """ + mesh = tria_mesh_fixture + expected_edge_length = float( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_edge_length"] + ) + result = mesh.avg_edge_length() + assert np.isclose( + result, expected_edge_length + ), f"Average edge length {result} is not equal to expected {expected_edge_length}" + + +def test_tria_normals(tria_mesh_fixture, loaded_data): + """ + Testing whether the shape of tria_normals array is equal to + (n_triangles,3) + """ + expected_triangle_normals = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_triangle_normals"] + ) + mesh = tria_mesh_fixture + result = mesh.tria_normals() + np.testing.assert_allclose(result, expected_triangle_normals) + + +def test_tria_qualities(tria_mesh_fixture, loaded_data): + """ + Testing the calculation of triangle qualities + """ + mesh = tria_mesh_fixture + result = mesh.tria_qualities() + expected_triangle = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_triangle"] + ) + np.testing.assert_almost_equal(result, expected_triangle) + + +def test_has_free_vertices(tria_mesh_fixture): + """ + Testing the detection of free vertices + """ + mesh = tria_mesh_fixture + result = mesh.has_free_vertices() + + expected_result = False + assert ( + result == expected_result + ), f"Expected has_free_vertices {expected_result}, but got {result}" + + +def test_rm_free_vertices(tria_mesh_fixture, loaded_data): + """ + Testing the removal of free vertices + """ + updated_vertices, deleted_vertices = tria_mesh_fixture.rm_free_vertices_() + expected_vertices = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_vertices"] + ) + expected_deleted_vertices = np.array([]) + assert np.array_equal(updated_vertices, expected_vertices) + assert np.array_equal(deleted_vertices, expected_deleted_vertices) + + +# Define the test case for orient_ function +def test_orient(tria_mesh_fixture, loaded_data): + """ + Testing the orienting of the mesh + """ + # Call the tria_mesh_fixture.orient_() method to re-orient the mesh consistently + mesh = tria_mesh_fixture + flipped = mesh.orient_() + + # Check if the returned 'flipped' count matches the expected count i.e 6 + expected_flips = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_flips"] + ) + assert flipped == expected_flips + + +def test_is_oriented_(tria_mesh_fixture): + """ + Testing the check for mesh orientation + """ + 
mesh = tria_mesh_fixture + "orient the mesh consistently so that all triangle normals point outwards." + mesh.orient_() + result = mesh.is_oriented() + expected_result = True + assert ( + result == expected_result + ), f"Expected is_oriented result {expected_result}, but got {result}" + + +## Compute volume (works only for oriented meshes) + + +def test_volume_(tria_mesh_fixture): + """ + Testing the computation of enclosed volume for oriented mesh + """ + mesh = tria_mesh_fixture + mesh.orient_() + result = mesh.volume() + expected_result = 1.0 + assert ( + result == expected_result + ), f"Expected volume result {expected_result}, but got {result}" + + +def test_vertex_normals(tria_mesh_fixture, loaded_data): + """ + Testing the computation of vertex normals for oriented mesh + """ + + # Calling tria_mesh_fixture.orient_() will modify the tria_mesh_fixture in-place and + # return the number of flipped triangles. However, it won't return a new instance of TriaMesh, so assigning + # the result to mesh like mesh = tria_mesh_fixture.orient_() would not work as expected. + + # Ensure the mesh is oriented before computing vertex normals + tria_mesh_fixture.orient_() + mesh = tria_mesh_fixture + result = mesh.vertex_normals() + expected_result = np.array( + loaded_data["expected_outcomes"]["test_tria_mesh"]["expected_result"] + ) + np.testing.assert_allclose(result, expected_result) + + +def test_normal_offset(tria_mesh_fixture, loaded_data): + """ + Testing the normal offset operation + """ + + # Orient the mesh before applying normal offset + mesh = tria_mesh_fixture + mesh.orient_() + + # Get the initial vertex coordinates + mesh.v.copy() + + # Calculate the distance 'd' for the offset + d = 0.2 * mesh.avg_edge_length() + + # Act: Perform the 'normal_offset_' operation + mesh.normal_offset_(d) + + +def test_boundary_mesh(tria_mesh_fixture): + # Original mesh with closed boundaries + + original_mesh = tria_mesh_fixture + + # Create a boundary mesh by dropping triangles + boundary_mesh = TriaMesh(original_mesh.v, original_mesh.t[2:, :]) + + # Check if the boundary mesh has the correct number of vertices and triangles + assert boundary_mesh.v.shape[0] == original_mesh.v.shape[0] + assert boundary_mesh.t.shape[0] == original_mesh.t.shape[0] - 2 + + +def test_refine_and_boundary_loops(tria_mesh_fixture, loaded_data): + """ + Testing boundary loops after refining the mesh + """ + # Create a boundary mesh by dropping triangles + tria_mesh_fixture.orient_() + + original_mesh = tria_mesh_fixture + + boundary_mesh = TriaMesh(original_mesh.v, original_mesh.t[2:, :]) + + # Refine the boundary mesh + boundary_mesh.refine_() + + # Get the boundary loops of the refined mesh + boundary_loops = boundary_mesh.boundary_loops() + + # Check if there is only one boundary loop + assert len(boundary_loops) == 1 + + # Check the vertices along the boundary loop + expected_boundary_loop = loaded_data["expected_outcomes"]["test_tria_mesh"][ + "expected_boundary_loop" + ] + + assert boundary_loops[0] == expected_boundary_loop
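+
+
+# A sketch of the assumed invocation for this test suite (not enforced by the
+# code above): run pytest from the repository root so that the relative paths
+# "data/cubeTria.vtk", "data/cubeTetra.vtk" and
+# "lapy/utils/tests/expected_outcomes.json" resolve, e.g.
+#
+#     python -m pytest lapy/utils/tests -v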