Coverage for tests/kernels/test_matern_karhunenloeve_kernel.py: 100%
76 statements
« prev ^ index » next coverage.py v7.11.3, created at 2025-11-16 21:43 +0000
1from itertools import product
3import lab as B
4import numpy as np
5import pytest
7from geometric_kernels.kernels import MaternKarhunenLoeveKernel
8from geometric_kernels.kernels.matern_kernel import default_num
9from geometric_kernels.spaces import Mesh
11from ..helper import check_function_with_backend, discrete_spectrum_spaces
# Absolute tolerance for floating-point comparisons throughout this module.
_EPS = 1e-5
@pytest.fixture(
    params=product(discrete_spectrum_spaces(), [True, False]),
    ids=lambda tpl: f"{tpl[0]}{'-normalized' if tpl[1] else ''}",
    scope="module",
)
def inputs(request):
    """
    Module-scoped fixture yielding a tuple (space, num_levels, kernel, X, X2):

    - space: the discrete-spectrum space under test (request.param[0]),
    - num_levels: default_num(space),
    - kernel: MaternKarhunenLoeveKernel(space, num_levels, normalize=request.param[1]),
    - X: a random sample of random size from the space,
    - X2: another random sample of random size from the space.
    """
    space, normalize = request.param
    num_levels = default_num(space)
    kernel = MaternKarhunenLoeveKernel(space, num_levels, normalize=normalize)

    # Fixed seed so the fixture is deterministic across test runs.
    rng = np.random.RandomState(0)
    size_x, size_x2 = rng.randint(low=1, high=100 + 1, size=2)
    rng, X = space.random(rng, size_x)
    rng, X2 = space.random(rng, size_x2)

    return space, num_levels, kernel, X, X2
def test_params(inputs):
    """init_params must provide 'lengthscale' and 'nu' entries, each of shape (1,)."""
    kernel = inputs[2]
    params = kernel.init_params()

    for name in ("lengthscale", "nu"):
        assert name in params
        assert params[name].shape == (1,)
def test_num_levels(inputs):
    """The kernel's eigenfunctions must expose exactly the requested number of levels."""
    num_levels, kernel = inputs[1], inputs[2]

    assert kernel.eigenfunctions.num_levels == num_levels
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_eigenvalues_shape(inputs, backend):
    """Eigenvalues must be returned as a (num_levels, 1)-shaped array in every backend."""
    _, num_levels, kernel, _, _ = inputs
    params = kernel.init_params()

    def has_expected_shape(expected, f_out):
        # `expected` is the (num_levels, 1) tuple passed as the reference result.
        return B.shape(f_out) == expected

    # Check that the eigenvalues have appropriate shape.
    check_function_with_backend(
        backend,
        (num_levels, 1),
        kernel.eigenvalues,
        params,
        compare_to_result=has_expected_shape,
    )
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_eigenvalues_positive(inputs, backend):
    """All eigenvalues of the kernel must be nonnegative in every backend."""
    kernel = inputs[2]
    params = kernel.init_params()

    def is_nonnegative(_, f_out):
        return np.all(B.to_numpy(f_out) >= 0)

    # Check that the eigenvalues are nonnegative.
    check_function_with_backend(
        backend,
        None,
        kernel.eigenvalues,
        params,
        compare_to_result=is_nonnegative,
    )
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_eigenvalues_ordered(inputs, backend):
    """Eigenvalues must be sorted in descending order (up to a small tolerance)."""
    kernel = inputs[2]
    params = kernel.init_params()

    def is_nonincreasing(_, f_out):
        values = B.to_numpy(f_out)
        # _EPS tolerance guards against ties being broken by floating-point noise.
        return np.all(values[:-1] >= values[1:] - _EPS)

    # Check that the eigenvalues are sorted in descending order.
    check_function_with_backend(
        backend,
        None,
        kernel.eigenvalues,
        params,
        compare_to_result=is_nonincreasing,
    )
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_K(inputs, backend):
    """kernel.K(X, X2) must have shape (|X|, |X2|) and agree across backends."""
    _, _, kernel, X, X2 = inputs
    params = kernel.init_params()

    reference = kernel.K(params, X, X2)

    assert reference.shape == (X.shape[0], X2.shape[0]), "K has incorrect shape"

    if backend == "numpy":
        # The reference above was already computed with numpy; nothing more to compare.
        return

    # Check that kernel.K computed using `backend` coincides with the numpy result.
    check_function_with_backend(
        backend,
        reference,
        kernel.K,
        params,
        X,
        X2,
    )
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_K_one_param(inputs, backend):
    """kernel.K(params, X) must coincide with kernel.K(params, X, X)."""
    space, _, kernel, X, _ = inputs
    params = kernel.init_params()

    reference = kernel.K(params, X, X)

    # Mesh spaces need a looser tolerance than the generic _EPS.
    tolerance = 1e-2 if isinstance(space, Mesh) else _EPS

    # Check that kernel.K(X) coincides with kernel.K(X, X).
    check_function_with_backend(
        backend,
        reference,
        kernel.K,
        params,
        X,
        atol=tolerance,
    )
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_K_diag(inputs, backend):
    """kernel.K_diag must equal the diagonal of the full kernel matrix."""
    space, _, kernel, X, _ = inputs
    params = kernel.init_params()

    diag_reference = kernel.K(params, X).diagonal()

    assert diag_reference.shape == (X.shape[0],), "The diagonal has incorrect shape"

    # Mesh spaces need a looser tolerance than the generic _EPS.
    tolerance = 1e-2 if isinstance(space, Mesh) else _EPS

    # Check that kernel.K_diag coincides with the diagonal of kernel.K.
    check_function_with_backend(
        backend,
        diag_reference,
        kernel.K_diag,
        params,
        X,
        atol=tolerance,
    )
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_normalize(inputs, backend):
    """For a normalized kernel, the average variance over the space must be ~1."""
    space, _, kernel, _, _ = inputs

    if not kernel.normalize:
        pytest.skip("No need to check normalization for an unnormalized kernel")

    params = kernel.init_params()
    rng = np.random.RandomState(0)
    # We need a large sample to get a good estimate of the mean variance.
    rng, X = space.random(rng, 1000)

    def average_variance(params, X):
        mean = B.mean(kernel.K_diag(params, X), squeeze=False)
        # The reshape shields from a bug in lab present at least up to version 1.6.6.
        return B.reshape(mean, 1)

    # Check that the average variance of the kernel is 1.
    check_function_with_backend(
        backend,
        np.array([1.0]),
        average_variance,
        params,
        X,
        atol=0.2,  # very loose, but helps make sure the result is close to 1
    )