Coverage for tests / utils / test_kernel_formulas.py: 100%
79 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-02-22 15:49 +0000
1from math import log, tanh
3import numpy as np
4import pytest
5from sklearn.metrics.pairwise import rbf_kernel
7from geometric_kernels.spaces import HammingGraph, HypercubeGraph
8from geometric_kernels.utils.kernel_formulas import (
9 hamming_graph_heat_kernel,
10 hypercube_graph_heat_kernel,
11)
13from ..helper import check_function_with_backend
@pytest.mark.parametrize("d", [1, 5, 10])
@pytest.mark.parametrize("lengthscale", [1.0, 5.0, 10.0])
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_hypercube_graph_heat_kernel(d, lengthscale, backend):
    """Compare the hypercube-graph heat kernel to an RBF reference.

    The heat kernel on the hypercube graph (unnormalized Laplacian) should
    coincide with an RBF kernel restricted to binary vectors once the length
    scale is remapped via gamma = -log(tanh(lengthscale**2 / 2)).  For d > 5
    the kernel's product property over sub-cubes is checked as well.
    """
    space = HypercubeGraph(d)

    rng = np.random.RandomState(0)
    num_x, num_x2 = rng.randint(low=1, high=min(2**d, 10) + 1, size=2)
    rng, X = space.random(rng, num_x)
    rng, X2 = space.random(rng, num_x2)

    def heat_kernel(lengthscale, X, X2):
        return hypercube_graph_heat_kernel(
            lengthscale, X, X2, normalized_laplacian=False
        )

    # RBF on the binary vectors, with the appropriately remapped length
    # scale, serves as the reference value.
    expected = rbf_kernel(X, X2, gamma=-log(tanh(lengthscale**2 / 2)))

    # Checks that the heat kernel on the hypercube graph coincides with the
    # RBF restricted onto binary vectors.
    check_function_with_backend(
        backend,
        expected,
        heat_kernel,
        np.array([lengthscale]),
        X,
        X2,
        atol=1e-2,
    )

    if d > 5:
        # Split the first pair of points into two sub-cubes (first 3 vs.
        # remaining coordinates) and evaluate the kernel on each part.
        kern_left = hypercube_graph_heat_kernel(
            np.array([lengthscale]),
            X[0:1, :3],
            X2[0:1, :3],
            normalized_laplacian=False,
        )
        kern_right = hypercube_graph_heat_kernel(
            np.array([lengthscale]),
            X[0:1, 3:],
            X2[0:1, 3:],
            normalized_laplacian=False,
        )

        # Checks that the heat kernel of the product is equal to the product
        # of heat kernels.
        check_function_with_backend(
            backend,
            kern_left * kern_right,
            heat_kernel,
            np.array([lengthscale]),
            X[0:1, :],
            X2[0:1, :],
        )
@pytest.mark.parametrize("d", [1, 5, 10])
@pytest.mark.parametrize("lengthscale", [1.0, 5.0, 10.0])
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_hamming_graph_reduces_to_hypercube_when_q_equals_2(d, lengthscale, backend):
    """The Hamming heat kernel at q=2 must match the hypercube one.

    Evaluates the general Hamming-graph formula with q=2 on binary inputs
    and compares it against the dedicated hypercube-graph formula, which is
    used as the reference.
    """
    space = HypercubeGraph(d)

    rng = np.random.RandomState(0)
    num_x, num_x2 = rng.randint(low=1, high=min(2**d, 10) + 1, size=2)
    rng, X = space.random(rng, num_x)
    rng, X2 = space.random(rng, num_x2)

    # Compute reference using the hypercube formula.
    expected = hypercube_graph_heat_kernel(
        np.array([lengthscale]), X, X2, normalized_laplacian=False
    )

    def heat_kernel_hamming(lengthscale, X, X2):
        return hamming_graph_heat_kernel(
            lengthscale, X, X2, q=2, normalized_laplacian=False
        )

    # Check that the general Hamming formula with q=2 matches the hypercube.
    check_function_with_backend(
        backend,
        expected,
        heat_kernel_hamming,
        np.array([lengthscale]),
        X,
        X2,
    )
@pytest.mark.parametrize("d", [1, 5, 10])
@pytest.mark.parametrize("q", [2, 5, 7])
@pytest.mark.parametrize("lengthscale", [1.0, 5.0, 10.0])
@pytest.mark.parametrize("backend", ["numpy", "tensorflow", "torch", "jax"])
def test_hamming_graph_heat_kernel(d, q, lengthscale, backend):
    """Check the Hamming-graph heat kernel against an RBF reference.

    The heat kernel on the Hamming graph H(d, q) (unnormalized Laplacian)
    should coincide with an RBF kernel evaluated on one-hot encodings of the
    categorical inputs, with the length scale remapped so that each
    disagreeing coordinate contributes the per-coordinate heat-kernel
    factor.  For d > 5, also checks the product property: the kernel over
    the full graph equals the product of kernels over coordinate splits.
    """
    space = HammingGraph(d, q)

    key = np.random.RandomState(0)
    N, N2 = key.randint(low=1, high=min(q**d, 10) + 1, size=2)
    key, X = space.random(key, N)
    key, X2 = space.random(key, N2)

    def to_one_hot(X_cat, q):
        """Convert categorical matrix [N, d] to one-hot [N, d*q]."""
        # Vectorized encoding: indexing the identity matrix by category
        # turns each entry into a length-q one-hot row; the reshape then
        # concatenates the per-coordinate blocks into [N, d*q].  This
        # replaces the original O(N*d) Python double loop.
        n_rows, n_cols = X_cat.shape
        categories = np.asarray(X_cat, dtype=int)
        return np.eye(q, dtype=float)[categories].reshape(n_rows, n_cols * q)

    X_onehot = to_one_hot(X, q)
    X2_onehot = to_one_hot(X2, q)

    # Remap the length scale: each disagreeing coordinate multiplies the
    # kernel by factor_disagree, but one-hot encoding counts a single
    # categorical difference as two coordinate differences, hence the /2.
    beta = lengthscale**2 / 2
    exp_neg_beta_q = np.exp(-beta * q)
    factor_disagree = (1 - exp_neg_beta_q) / (1 + (q - 1) * exp_neg_beta_q)
    gamma = -np.log(factor_disagree) / 2  # one-hot counts differences twice

    result = rbf_kernel(X_onehot, X2_onehot, gamma=gamma)

    def heat_kernel(lengthscale, X, X2):
        return hamming_graph_heat_kernel(
            lengthscale, X, X2, q=q, normalized_laplacian=False
        )

    # Checks that the heat kernel on the Hamming graph coincides with the RBF
    # restricted onto categorical vectors, with appropriately redefined length scale.
    check_function_with_backend(
        backend,
        result,
        heat_kernel,
        np.array([lengthscale]),
        X,
        X2,
        atol=1e-2,
    )

    if d > 5:
        # Split the first pair of points into two coordinate groups
        # (first 3 vs. the rest) and evaluate the kernel on each group.
        X_first = X[0:1, :3]
        X2_first = X2[0:1, :3]
        X_second = X[0:1, 3:]
        X2_second = X2[0:1, 3:]

        K_first = hamming_graph_heat_kernel(
            np.array([lengthscale]), X_first, X2_first, q=q, normalized_laplacian=False
        )
        K_second = hamming_graph_heat_kernel(
            np.array([lengthscale]),
            X_second,
            X2_second,
            q=q,
            normalized_laplacian=False,
        )

        result = K_first * K_second

        # Checks that the heat kernel of the product is equal to the product
        # of heat kernels.
        check_function_with_backend(
            backend,
            result,
            heat_kernel,
            np.array([lengthscale]),
            X[0:1, :],
            X2[0:1, :],
        )