Coverage for geometric_kernels/kernels/feature_map.py: 100%
33 statements
coverage.py v7.11.3, created at 2025-11-16 21:43 +0000
1"""
2This module provides the :class:`MaternFeatureMapKernel` kernel, the basic
3kernel for non-compact symmetric spaces, subclasses of
4:class:`~.spaces.NoncompactSymmetricSpace`.
5"""
7import lab as B
8import numpy as np
9from beartype.typing import Dict, Optional
11from geometric_kernels.feature_maps import FeatureMap
12from geometric_kernels.kernels.base import BaseGeometricKernel
13from geometric_kernels.spaces.base import Space
14from geometric_kernels.utils.utils import (
15 _check_1_vector,
16 _check_field_in_params,
17 make_deterministic,
18)


class MaternFeatureMapKernel(BaseGeometricKernel):
    r"""
    This class computes a (Matérn) kernel based on a feature map.

    .. math:: k_{\nu, \kappa}(x, y) = \langle \phi_{\nu, \kappa}(x), \phi_{\nu, \kappa}(y) \rangle_{\mathbb{R}^n}

    where $\langle \cdot , \cdot \rangle_{\mathbb{R}^n}$ is the standard inner
    product in $\mathbb{R}^n$ and $\phi_{\nu, \kappa}: X \to \mathbb{R}^n$ is
    an arbitrary function called a *feature map*. We assume that it depends
    on the smoothness and length scale parameters $\nu$ and $\kappa$,
    respectively, which makes this kernel specifically Matérn.
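
    For example, if $\phi$ were the identity map on $X = \mathbb{R}^n$, the
    formula above would reduce to the linear kernel $k(x, y) = x^\top y$. The
    feature maps this class is designed for, unlike the identity map, do
    depend on $\nu$ and $\kappa$.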

    .. note::
        A brief introduction to feature maps and the related kernels can be
        found on :doc:`this page </theory/feature_maps>`.

        Note that the finite-dimensional feature maps this kernel is meant to
        be used with are, in most cases, approximations of the intractable
        infinite-dimensional feature maps.

    :param space:
        The space on which the kernel is defined.
    :param feature_map:
        A :class:`~.feature_maps.FeatureMap` object that represents an
        arbitrary function $\phi_{\nu, \kappa}: X \to \mathbb{R}^n$, where
        $X$ is the `space`, $n$ can be an arbitrary finite integer, and
        $\nu, \kappa$ are the smoothness and length scale parameters.
    :param key:
        Random state, either `np.random.RandomState`,
        `tf.random.Generator`, `torch.Generator` or `jax.tensor` (which
        represents a random state).

        Many feature maps used in the library are randomized, thus requiring a
        `key` to work. The :class:`MaternFeatureMapKernel` uses this `key` to
        make them (and thus the kernel) deterministic, applying the utility
        function :func:`~.make_deterministic` to the pair `feature_map, key`.

        .. note::
            Even if the `feature_map` is deterministic, you need to provide a
            valid key, although it will essentially be ignored. In the future,
            we should probably make the `key` parameter optional.

    :param normalize:
        This parameter is passed on directly to the `feature_map`, as the
        keyword argument ``normalize``. If ``normalize=True``, then either
        $k(x, x) = 1$ for all $x \in X$, or $\int_X k(x, x) d x = 1$,
        depending on the type of the feature map and on the space $X$.

        .. note::
            For many kernel methods, $k(\cdot, \cdot)$ and $a k(\cdot, \cdot)$
            are indistinguishable, whatever the positive constant $a$ is. For
            these, it makes sense to use ``normalize=False`` to save some
            computational overhead. For others, like Gaussian process
            regression, the normalization of the kernel might be important.
            In these cases, you will typically want to set ``normalize=True``.
    """

    def __init__(
        self,
        space: Space,
        feature_map: FeatureMap,
        key: B.RandomState,
        normalize: bool = True,
    ):
        super().__init__(space)
        self.feature_map = make_deterministic(feature_map, key)
        self.normalize = normalize

    def init_params(self) -> Dict[str, B.NPNumeric]:
        """
        Initializes the dict of the trainable parameters of the kernel.

        Returns `dict(nu=np.array([np.inf]), lengthscale=np.array([1.0]))`.

        This dict can be modified and is passed to methods such as
        :meth:`~.K` or :meth:`~.K_diag` as the `params` argument.

        .. note::
            The values in the returned dict are always of the NumPy array
            type. Thus, if you want to use some other backend for internal
            computations when calling :meth:`~.K` or :meth:`~.K_diag`, you
            need to replace the values with their analogs typed as arrays
            of the desired backend.
        """
        params = dict(nu=np.array([np.inf]), lengthscale=np.array([1.0]))
        return params
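
    # A minimal sketch of the backend swap described in the note above
    # (assuming PyTorch as the desired backend; other backends work
    # analogously):
    #
    #     import torch
    #     params = kernel.init_params()
    #     params = {k: torch.tensor(v) for k, v in params.items()}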

    def K(
        self,
        params: Dict[str, B.Numeric],
        X: B.Numeric,
        X2: Optional[B.Numeric] = None,
        **kwargs,
    ):
        _check_field_in_params(params, "lengthscale")
        _check_1_vector(params["lengthscale"], 'params["lengthscale"]')

        _check_field_in_params(params, "nu")
        _check_1_vector(params["nu"], 'params["nu"]')

        _, features_X = self.feature_map(
            X, params, normalize=self.normalize, **kwargs
        )  # [N, O]
        if X2 is not None:
            _, features_X2 = self.feature_map(
                X2, params, normalize=self.normalize, **kwargs
            )  # [N2, O]
        else:
            features_X2 = features_X
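
        # The einsum below computes all pairwise inner products
        # <phi(x_n), phi(x2_m)> over the feature dimension; for unbatched
        # inputs it is equivalent to features_X @ features_X2.T, i.e. a
        # plain Gram matrix of the features.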
        feature_product = B.einsum("...no,...mo->...nm", features_X, features_X2)
        return feature_product

    def K_diag(self, params: Dict[str, B.Numeric], X: B.Numeric, **kwargs):
        _check_field_in_params(params, "lengthscale")
        _check_1_vector(params["lengthscale"], 'params["lengthscale"]')

        _check_field_in_params(params, "nu")
        _check_1_vector(params["nu"], 'params["nu"]')

        _, features_X = self.feature_map(
            X, params, normalize=self.normalize, **kwargs
        )  # [N, O]
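        # Summing squared features gives <phi(x_n), phi(x_n)> = k(x_n, x_n)
        # for each point, i.e. the diagonal of K(X, X) without forming the
        # full [N, N] matrix.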
        return B.sum(features_X**2, axis=-1)  # [N, ]