@@ -93,9 +93,8 @@ class RandomFourierKernelApprox(KernelApproximation):
     n_features_out_ : int
         Number of features output. This attribute is not available in
         estimators from :mod:`sklearn.kernel_approximation`.
-    ift_ : scipy.stats.rv_continuous
-        Probability distribution corresponding to inverse Fourier transform of
-        chosen kernel.
+    ft_ : scipy.stats.rv_continuous
+        Probability distribution corresponding to Fourier transform of chosen kernel.
     random_weights_ : np.ndarray, shape (n_features, n_components)
         Random weights to inner-product with features.
     random_offsets_ : np.ndarray, shape (n_features, )
@@ -106,7 +105,7 @@ class RandomFourierKernelApprox(KernelApproximation):
     Generate random Fourier features from a Gaussian kernel

     >>> ka = pykoop.RandomFourierKernelApprox(
-    ...     kernel_or_ift='gaussian',
+    ...     kernel_or_ft='gaussian',
     ...     n_components=10,
     ...     shape=1,
     ...     random_state=1234,
@@ -117,20 +116,20 @@ class RandomFourierKernelApprox(KernelApproximation):
     array([...])
     """

-    _ift_lookup = {
+    _ft_lookup = {
         'gaussian': scipy.stats.norm,
         'laplacian': scipy.stats.cauchy,
         'cauchy': scipy.stats.laplace,
     }
-    """Lookup table for inverse Fourier transform of kernel.
+    """Lookup table for Fourier transform of kernel.

     Laplacian and Cauchy being swapped is not a typo. They are Fourier
     transforms of each other.
     """

     def __init__(
         self,
-        kernel_or_ift: Union[str, scipy.stats.rv_continuous] = 'gaussian',
+        kernel_or_ft: Union[str, scipy.stats.rv_continuous] = 'gaussian',
         n_components: int = 100,
         shape: float = 1,
         method: str = 'weight_offset',
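The Laplacian/Cauchy swap noted in the lookup-table docstring can be sanity-checked numerically: the characteristic function of the standard Cauchy distribution is exp(-|delta|), which is exactly the unit-shape Laplacian kernel. A minimal sketch, not part of this change (the sample size and lag are arbitrary):

```python
# Check that frequencies drawn from a Cauchy distribution reproduce the
# Laplacian kernel exp(-|delta|), which is why the 'laplacian' entry maps to
# scipy.stats.cauchy in the lookup table above.
import numpy as np
import scipy.stats

omega = scipy.stats.cauchy.rvs(size=100_000, random_state=1234)
delta = 1.5  # arbitrary lag at which to evaluate the kernel

# Monte Carlo estimate of E[cos(omega * delta)].
estimate = np.mean(np.cos(omega * delta))
exact = np.exp(-np.abs(delta))  # Laplacian kernel with unit shape
print(estimate, exact)  # the two values should agree to within a few hundredths
```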
@@ -140,19 +139,19 @@ def __init__(

         Parameters
         ----------
-        kernel_or_ift : Union[str, scipy.stats.rv_continuous]
+        kernel_or_ft : Union[str, scipy.stats.rv_continuous]
             Kernel to approximate. Possible options are

-            - ``'gaussian'`` -- Gaussian kernel, with inverse Fourier
-              transform :class:`scipy.stats.norm` (default),
-            - ``'laplacian'`` -- Laplacian kernel, with inverse Fourier
-              transform :class:`scipy.stats.cauchy`, or
-            - ``'cauchy'`` -- Cauchy kernel, with inverse Fourier transform
-              :class:`scipy.stats.laplace`.
+            - ``'gaussian'`` -- Gaussian kernel, with Fourier transform
+              :class:`scipy.stats.norm` (default),
+            - ``'laplacian'`` -- Laplacian kernel, with Fourier transform
+              :class:`scipy.stats.cauchy`, or
+            - ``'cauchy'`` -- Cauchy kernel, with Fourier transform
+              :class:`scipy.stats.laplace`.

             Alternatively, a positive, shift-invariant kernel can be implicitly
-            specified by providing its inverse Fourier transform as a
-            univariate probability distribution subclassing
+            specified by providing its Fourier transform as a univariate
+            probability distribution subclassing
             :class:`scipy.stats.rv_continuous`.

         n_components : int
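To illustrate the second code path documented above (passing a distribution rather than a name string), the sketch below constructs the approximation with `scipy.stats.norm` directly, which should be equivalent to the ``'gaussian'`` option since that string resolves to the same distribution in the lookup table. This assumes the renamed `kernel_or_ft` keyword from this change:

```python
import pykoop
import scipy.stats

# Pass the kernel's Fourier transform explicitly instead of a name string;
# scipy.stats.norm subclasses rv_continuous, so it bypasses the lookup table.
ka = pykoop.RandomFourierKernelApprox(
    kernel_or_ft=scipy.stats.norm,
    n_components=10,
    shape=1,
    random_state=1234,
)
```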
@@ -179,7 +178,7 @@ def __init__(
         random_state : Union[int, np.random.RandomState, None]
             Random seed.
         """
-        self.kernel_or_ift = kernel_or_ift
+        self.kernel_or_ft = kernel_or_ft
         self.n_components = n_components
         self.shape = shape
         self.method = method
@@ -210,11 +209,11 @@ def fit(
             If any of the constructor parameters are incorrect.
         """
         X = sklearn.utils.validation.check_array(X)
-        # Set inverse Fourier transform
-        if isinstance(self.kernel_or_ift, str):
-            self.ift_ = self._ift_lookup[self.kernel_or_ift]
+        # Set Fourier transform
+        if isinstance(self.kernel_or_ft, str):
+            self.ft_ = self._ft_lookup[self.kernel_or_ft]
         else:
-            self.ift_ = self.kernel_or_ift
+            self.ft_ = self.kernel_or_ft
         # Validate input
         if self.n_components <= 0:
             raise ValueError('`n_components` must be positive.')
@@ -230,7 +229,7 @@ def fit(
         else:
             self.n_features_out_ = self.n_components
         # Generate random weights
-        self.random_weights_ = self.ift_.rvs(
+        self.random_weights_ = self.ft_.rvs(
             scale=1,
             size=(self.n_features_in_, self.n_components),
             random_state=self.random_state,
@@ -249,7 +248,7 @@ def fit(
         # Easiest way to make sure distribution is univariate is to check the
         # dimension of the output.
         if self.random_weights_.ndim != 2:
-            raise ValueError('`kernel_or_ift` must specify a univariate '
+            raise ValueError('`kernel_or_ft` must specify a univariate '
                              'probability distribution.')
         return self

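For context on what `fit` is sampling, the sketch below reproduces the underlying random Fourier feature construction outside of pykoop: frequencies drawn from the kernel's Fourier transform (`scipy.stats.norm` for the Gaussian kernel) plus uniform phase offsets give a cosine feature map whose inner product approximates the kernel. It is a standalone illustration of the ``'weight_offset'`` idea, not pykoop's exact `transform` code:

```python
import numpy as np
import scipy.stats

n_features, n_components = 2, 1000
rng = np.random.RandomState(1234)

# Frequencies sampled from the Gaussian kernel's Fourier transform, plus
# uniform phase offsets (the weight-and-offset construction).
W = scipy.stats.norm.rvs(size=(n_features, n_components), random_state=rng)
b = rng.uniform(0, 2 * np.pi, size=n_components)

def z(x):
    """Random Fourier feature map."""
    return np.sqrt(2 / n_components) * np.cos(W.T @ x + b)

x, y = np.array([0.3, -0.5]), np.array([0.1, 0.4])
approx = z(x) @ z(y)
exact = np.exp(-np.linalg.norm(x - y)**2 / 2)  # Gaussian kernel, unit shape
print(approx, exact)  # should agree to within a few hundredths
```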