Coverage for tests / spaces / test_hamming_graph.py: 100%
50 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-02-22 15:49 +0000
1import lab as B
2import numpy as np
3import pytest
5from geometric_kernels.kernels import MaternGeometricKernel
6from geometric_kernels.spaces import HammingGraph, HypercubeGraph
7from geometric_kernels.utils.kernel_formulas import hamming_graph_heat_kernel
9from ..helper import check_function_with_backend
@pytest.fixture(params=[(1, 2), (2, 2), (5, 2), (10, 2), (10, 4)])
def inputs(request) -> tuple:
    """
    Returns a tuple (space, eigenfunctions, X, X2, weights) where:
    - space is a HammingGraph object with (dim, n_cat) equal to request.param,
    - eigenfunctions is the respective Eigenfunctions object with at most 5 levels,
    - X is a random sample of random size from the space,
    - X2 is another random sample of random size from the space,
    - weights is an array of positive numbers of shape (eigenfunctions.num_levels, 1).
    """
    d, q = request.param
    space = HammingGraph(dim=d, n_cat=q)
    # The space has space.dim + 1 levels in total; use at most 5 of them.
    eigenfunctions = space.get_eigenfunctions(min(space.dim + 1, 5))

    key = np.random.RandomState(0)
    # Sample sizes are capped both by 10 and by the number of points in the
    # space (q**d), so sampling never asks for more points than exist.
    N, N2 = key.randint(low=1, high=min(q**d, 10) + 1, size=2)
    key, X = space.random(key, N)
    key, X2 = space.random(key, N2)

    # These weights are used for testing the weighted outerproduct, they
    # should be positive. Draw them from a dedicated seeded generator rather
    # than the global np.random state so the fixture is fully deterministic.
    weights = np.random.RandomState(42).rand(eigenfunctions.num_levels, 1) ** 2 + 1e-5

    return space, eigenfunctions, X, X2, weights
38def test_numbers_of_eigenfunctions(inputs):
39 space, eigenfunctions, _, _, _ = inputs
40 num_levels = eigenfunctions.num_levels
42 # If the number of levels is maximal, check that the number of
43 # eigenfunctions is equal to the number of categorical vectors.
44 if num_levels == space.dim + 1:
45 assert eigenfunctions.num_eigenfunctions == space.n_cat**space.dim
@pytest.mark.parametrize("nu", [1.5, np.inf])
@pytest.mark.parametrize("lengthscale", [1.0, 5.0, 10.0])
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_reduces_to_hypercube_when_q_equals_2(inputs, nu, lengthscale, backend):
    """For n_cat=2 the Hamming graph coincides with the hypercube graph:
    both the eigenvalues and the Matern kernel values of the two spaces
    must agree."""
    space, eigenfunctions, X, X2, _ = inputs

    if space.n_cat != 2:
        pytest.skip("Only applicable when n_cat=2")

    hypercube = HypercubeGraph(space.dim)
    # HypercubeGraph expects boolean inputs.
    X_bool, X2_bool = X.astype(bool), X2.astype(bool)

    # Compare eigenvalues (backend-agnostic, only once per backend)
    if (lengthscale, nu) == (1.0, 1.5):
        np.testing.assert_allclose(
            space.get_eigenvalues(eigenfunctions.num_levels),
            hypercube.get_eigenvalues(eigenfunctions.num_levels),
            rtol=1e-10,
        )

    # Compare kernel values with backend testing
    params = {"nu": np.array([nu]), "lengthscale": np.array([lengthscale])}
    expected = MaternGeometricKernel(hypercube).K(params, X_bool, X2_bool)

    check_function_with_backend(
        backend,
        expected,
        MaternGeometricKernel(space).K,
        params,
        X,
        X2,
        atol=1e-2,
    )
@pytest.mark.parametrize("lengthscale", [1.0, 5.0, 10.0])
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_against_analytic_heat_kernel(inputs, lengthscale, backend):
    """Check that MaternGeometricKernel on HammingGraph with nu=infinity
    coincides with the closed form expression for the heat kernel on the
    Hamming graph."""
    space, _, X, X2, _ = inputs

    ls = np.array([lengthscale])
    expected = hamming_graph_heat_kernel(ls, X, X2, q=space.n_cat)

    check_function_with_backend(
        backend,
        expected,
        MaternGeometricKernel(space).K,
        {"nu": np.array([np.inf]), "lengthscale": ls},
        X,
        X2,
        atol=1e-2,
    )