Coverage for tests/kernels/test_matern_karhunenloeve_kernel.py: 100%

76 statements  

« prev     ^ index     » next       coverage.py v7.11.3, created at 2025-11-16 21:43 +0000

1from itertools import product 

2 

3import lab as B 

4import numpy as np 

5import pytest 

6 

7from geometric_kernels.kernels import MaternKarhunenLoeveKernel 

8from geometric_kernels.kernels.matern_kernel import default_num 

9from geometric_kernels.spaces import Mesh 

10 

11from ..helper import check_function_with_backend, discrete_spectrum_spaces 

12 

13_EPS = 1e-5 

14 

15 

@pytest.fixture(
    params=product(discrete_spectrum_spaces(), [True, False]),
    ids=lambda tpl: f"{tpl[0]}{'-normalized' if tpl[1] else ''}",
    scope="module",
)
def inputs(request):
    """
    Module-scoped fixture returning a tuple (space, num_levels, kernel, X, X2):

    - space: the discrete-spectrum space under test (request.param[0]),
    - num_levels: default_num(space),
    - kernel: a MaternKarhunenLoeveKernel on `space`, normalized iff request.param[1],
    - X: a random sample of random size from the space,
    - X2: another random sample of random size from the space.
    """
    space, normalize = request.param
    levels = default_num(space)
    matern_kernel = MaternKarhunenLoeveKernel(space, levels, normalize=normalize)

    rng = np.random.RandomState(0)
    sizes = rng.randint(low=1, high=100 + 1, size=2)
    rng, X = space.random(rng, sizes[0])
    rng, X2 = space.random(rng, sizes[1])

    return space, levels, matern_kernel, X, X2

40 

41 

def test_params(inputs):
    """The kernel exposes `lengthscale` and `nu` parameters, each of shape (1,)."""
    kernel = inputs[2]

    params = kernel.init_params()

    for name in ("lengthscale", "nu"):
        assert name in params
        assert params[name].shape == (1,)

51 

52 

def test_num_levels(inputs):
    """The kernel's eigenfunctions expose exactly the requested number of levels."""
    num_levels, kernel = inputs[1], inputs[2]

    assert kernel.eigenfunctions.num_levels == num_levels

57 

58 

@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_eigenvalues_shape(inputs, backend):
    """kernel.eigenvalues(params) has shape (num_levels, 1) in every backend."""
    _, num_levels, kernel, _, _ = inputs
    params = kernel.init_params()

    expected_shape = (num_levels, 1)

    check_function_with_backend(
        backend,
        expected_shape,
        kernel.eigenvalues,
        params,
        compare_to_result=lambda expected, out: B.shape(out) == expected,
    )

72 

73 

@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_eigenvalues_positive(inputs, backend):
    """All eigenvalues produced by the kernel are nonnegative."""
    kernel = inputs[2]
    params = kernel.init_params()

    def _all_nonnegative(_, out):
        return np.all(B.to_numpy(out) >= 0)

    check_function_with_backend(
        backend,
        None,
        kernel.eigenvalues,
        params,
        compare_to_result=_all_nonnegative,
    )

87 

88 

@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_eigenvalues_ordered(inputs, backend):
    """Eigenvalues come sorted in descending order (up to a small tolerance)."""
    kernel = inputs[2]
    params = kernel.init_params()

    def _is_descending(_, out):
        values = B.to_numpy(out)
        # Allow a tolerance of _EPS for numerically-equal neighbors.
        return np.all(values[:-1] >= values[1:] - _EPS)

    check_function_with_backend(
        backend,
        None,
        kernel.eigenvalues,
        params,
        compare_to_result=_is_descending,
    )

104 

105 

@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_K(inputs, backend):
    """Cross-covariance K(X, X2) has shape (|X|, |X2|); non-numpy backends match numpy."""
    kernel, X, X2 = inputs[2], inputs[3], inputs[4]
    params = kernel.init_params()

    reference = kernel.K(params, X, X2)  # numpy reference value

    assert reference.shape == (X.shape[0], X2.shape[0]), "K has incorrect shape"

    if backend == "numpy":
        return  # the reference itself is the numpy result; nothing more to compare

    # Check that kernel.K computed using `backend` coincides with the numpy result.
    check_function_with_backend(
        backend,
        reference,
        kernel.K,
        params,
        X,
        X2,
    )

125 

126 

@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_K_one_param(inputs, backend):
    """Calling kernel.K with a single input coincides with kernel.K(X, X)."""
    space, _, kernel, X, _ = inputs
    params = kernel.init_params()

    two_arg_result = kernel.K(params, X, X)
    # Mesh eigenpairs are computed numerically, hence the looser tolerance.
    tolerance = 1e-2 if isinstance(space, Mesh) else _EPS

    check_function_with_backend(
        backend,
        two_arg_result,
        kernel.K,
        params,
        X,
        atol=tolerance,
    )

143 

144 

@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_K_diag(inputs, backend):
    """kernel.K_diag coincides with the diagonal of the full kernel matrix."""
    space, _, kernel, X, _ = inputs
    params = kernel.init_params()

    diag_reference = kernel.K(params, X).diagonal()

    assert diag_reference.shape == (X.shape[0],), "The diagonal has incorrect shape"

    # Mesh eigenpairs are computed numerically, hence the looser tolerance.
    tolerance = 1e-2 if isinstance(space, Mesh) else _EPS

    check_function_with_backend(
        backend,
        diag_reference,
        kernel.K_diag,
        params,
        X,
        atol=tolerance,
    )

163 

164 

@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_normalize(inputs, backend):
    """For a normalized kernel, the mean of K_diag over a large sample is close to 1."""
    space, _, kernel, _, _ = inputs

    if not kernel.normalize:
        pytest.skip("No need to check normalization for an unnormalized kernel")

    params = kernel.init_params()
    rng = np.random.RandomState(0)
    # A large sample is needed to get a good estimate of the mean variance.
    rng, X = space.random(rng, 1000)

    def mean_variance(params, X):
        averaged = B.mean(kernel.K_diag(params, X), squeeze=False)
        # The reshape shields from a bug in lab present at least up to version 1.6.6.
        return B.reshape(averaged, 1)

    check_function_with_backend(
        backend,
        np.array([1.0]),
        mean_variance,
        params,
        X,
        atol=0.2,  # very loose, but helps make sure the result is close to 1
    )