diff --git a/doc/conf.py b/doc/conf.py index 66ff3dcd..796b7861 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -69,3 +69,14 @@ # Switch to old behavior with html4, for a good display of references, # as described in https://github.com/sphinx-doc/sphinx/issues/6705 html4_writer = True + + +# Temporary work-around for spacing problem between parameter and parameter +# type in the doc, see https://github.com/numpy/numpydoc/issues/215. The bug +# has been fixed in sphinx (https://github.com/sphinx-doc/sphinx/pull/5976) but +# through a change in sphinx basic.css except rtd_theme does not use basic.css. +# In an ideal world, this would get fixed in this PR: +# https://github.com/readthedocs/sphinx_rtd_theme/pull/747/files +def setup(app): + app.add_javascript('js/copybutton.js') + app.add_stylesheet("basic.css") diff --git a/doc/weakly_supervised.rst b/doc/weakly_supervised.rst index cf313ba1..ba21f510 100644 --- a/doc/weakly_supervised.rst +++ b/doc/weakly_supervised.rst @@ -483,7 +483,7 @@ is the off-diagonal L1 norm. L1-penalized log-determinant regularization `_. ICML 2009. - .. [2] Adapted from https://gist.github.com/kcarnold/5439945 + .. [2] Code adapted from https://gist.github.com/kcarnold/5439945 .. _rca: @@ -794,6 +794,6 @@ by default, :math:`D_{ld}(\mathbf{\cdot, \cdot})` is the LogDet divergence: `Metric Learning from Relative Comparisons by Minimizing Squared Residual `_. ICDM 2012 - .. [2] Adapted from https://gist.github.com/kcarnold/5439917 + .. [2] Code adapted from https://gist.github.com/kcarnold/5439917 diff --git a/metric_learn/base_metric.py b/metric_learn/base_metric.py index d19998ff..f0fca508 100644 --- a/metric_learn/base_metric.py +++ b/metric_learn/base_metric.py @@ -39,7 +39,7 @@ def score_pairs(self, pairs): Returns ------- - scores: `numpy.ndarray` of shape=(n_pairs,) + scores : `numpy.ndarray` of shape=(n_pairs,) The score of every pair. 
See Also @@ -69,19 +69,19 @@ def _prepare_inputs(self, X, y=None, type_of_inputs='classic', Parameters ---------- - input: array-like + X : array-like The input data array to check. y : array-like The input labels array to check. - type_of_inputs: `str` {'classic', 'tuples'} + type_of_inputs : `str` {'classic', 'tuples'} The type of inputs to check. If 'classic', the input should be a 2D array-like of points or a 1D array like of indicators of points. If 'tuples', the input should be a 3D array-like of tuples or a 2D array-like of indicators of tuples. - **kwargs: dict + **kwargs : dict Arguments to pass to check_input. Returns @@ -89,7 +89,7 @@ def _prepare_inputs(self, X, y=None, type_of_inputs='classic', X : `numpy.ndarray` The checked input data array. - y: `numpy.ndarray` (optional) + y : `numpy.ndarray` (optional) The checked input labels array. """ self._check_preprocessor() @@ -203,7 +203,7 @@ def score_pairs(self, pairs): Returns ------- - scores: `numpy.ndarray` of shape=(n_pairs,) + scores : `numpy.ndarray` of shape=(n_pairs,) The learned Mahalanobis distance for every pair. See Also @@ -271,7 +271,7 @@ def metric_fun(u, v, squared=False): Returns ------- - distance: float + distance : float The distance between u and v according to the new metric. """ u = validate_vector(u) diff --git a/metric_learn/constraints.py b/metric_learn/constraints.py index 36d77194..5a62063a 100644 --- a/metric_learn/constraints.py +++ b/metric_learn/constraints.py @@ -19,12 +19,12 @@ class Constraints(object): Parameters ---------- partial_labels : `numpy.ndarray` of ints, shape=(n_samples,) - Array of labels, with -1 indicating unknown label. + Array of labels, with -1 indicating unknown label. Attributes ---------- partial_labels : `numpy.ndarray` of ints, shape=(n_samples,) - Array of labels, with -1 indicating unknown label. + Array of labels, with -1 indicating unknown label. 
""" def __init__(self, partial_labels): @@ -45,26 +45,29 @@ def positive_negative_pairs(self, num_constraints, same_length=False, Parameters ---------- - num_constraints : int - Number of positive and negative constraints to generate. - same_length : bool, optional (default=False) - If True, forces the number of positive and negative pairs to be - equal by ignoring some pairs from the larger set. - random_state : int or numpy.RandomState or None, optional (default=None) - A pseudo random number generator object or a seed for it if int. + num_constraints : int + Number of positive and negative constraints to generate. + + same_length : bool, optional (default=False) + If True, forces the number of positive and negative pairs to be + equal by ignoring some pairs from the larger set. + + random_state : int or numpy.RandomState or None, optional (default=None) + A pseudo random number generator object or a seed for it if int. + Returns ------- a : array-like, shape=(n_constraints,) - 1D array of indicators for the left elements of positive pairs. + 1D array of indicators for the left elements of positive pairs. b : array-like, shape=(n_constraints,) - 1D array of indicators for the right elements of positive pairs. + 1D array of indicators for the right elements of positive pairs. c : array-like, shape=(n_constraints,) - 1D array of indicators for the left elements of negative pairs. + 1D array of indicators for the left elements of negative pairs. d : array-like, shape=(n_constraints,) - 1D array of indicators for the right elements of negative pairs. + 1D array of indicators for the right elements of negative pairs. """ random_state = check_random_state(random_state) a, b = self._pairs(num_constraints, same_label=True, diff --git a/metric_learn/itml.py b/metric_learn/itml.py index 2094e160..5db438d8 100644 --- a/metric_learn/itml.py +++ b/metric_learn/itml.py @@ -211,9 +211,9 @@ class ITML(_BaseITML, _PairsClassifierMixin): References ---------- - .. 
[1] `Information-theoretic Metric Learning + .. [1] Jason V. Davis, et al. `Information-theoretic Metric Learning `_ Jason V. Davis, et al. + /DavisKJSD07_ICML.pdf>`_. ICML 2007. """ def fit(self, pairs, y, bounds=None, calibration_params=None): @@ -229,8 +229,10 @@ def fit(self, pairs, y, bounds=None, calibration_params=None): 3D Array of pairs with each row corresponding to two points, or 2D array of indices of pairs if the metric learner uses a preprocessor. + y : array-like, of shape (n_constraints,) Labels of constraints. Should be -1 for dissimilar pair, 1 for similar. + bounds : array-like of two numbers Bounds on similarity, aside slack variables, s.t. ``d(a, b) < bounds_[0]`` for all given pairs of similar points ``a`` @@ -239,6 +241,7 @@ def fit(self, pairs, y, bounds=None, calibration_params=None): If not provided at initialization, bounds_[0] and bounds_[1] will be set to the 5th and 95th percentile of the pairwise distances among all points present in the input `pairs`. + calibration_params : `dict` or `None` Dictionary of parameters to give to `calibrate_threshold` for the threshold calibration step done at the end of `fit`. If `None` is @@ -280,7 +283,7 @@ class ITML_Supervised(_BaseITML, TransformerMixin): `num_labeled` was deprecated in version 0.5.0 and will be removed in 0.6.0. - num_constraints: int, optional (default=None) + num_constraints : int, optional (default=None) Number of constraints to generate. If None, default to `20 * num_classes**2`. diff --git a/metric_learn/lfda.py b/metric_learn/lfda.py index 12617a94..a970e789 100644 --- a/metric_learn/lfda.py +++ b/metric_learn/lfda.py @@ -39,10 +39,16 @@ class LFDA(MahalanobisMixin, TransformerMixin): defaults to min(7, n_features - 1). embedding_type : str, optional (default: 'weighted') - Type of metric in the embedding space - 'weighted' - weighted eigenvectors - 'orthonormalized' - orthonormalized - 'plain' - raw eigenvectors + Type of metric in the embedding space.
+ + 'weighted' + weighted eigenvectors + + 'orthonormalized' + orthonormalized + + 'plain' + raw eigenvectors preprocessor : array-like, shape=(n_samples, n_features) or callable The preprocessor to call to get tuples from indices. If array-like, @@ -67,13 +73,14 @@ class LFDA(MahalanobisMixin, TransformerMixin): References ------------------ - .. [1] `Dimensionality Reduction of Multimodal Labeled Data by Local Fisher - Discriminant Analysis `_ - Masashi Sugiyama. - - .. [2] `Local Fisher Discriminant Analysis on Beer Style Clustering - `_ Yuan Tang. + .. [1] Masashi Sugiyama. `Dimensionality Reduction of Multimodal Labeled + Data by Local Fisher Discriminant Analysis + `_. JMLR 2007. + + .. [2] Yuan Tang. `Local Fisher Discriminant Analysis on Beer Style + Clustering + `_. ''' def __init__(self, n_components=None, num_dims='deprecated', diff --git a/metric_learn/lmnn.py b/metric_learn/lmnn.py index df8fe649..a026a8f6 100644 --- a/metric_learn/lmnn.py +++ b/metric_learn/lmnn.py @@ -137,10 +137,11 @@ class LMNN(MahalanobisMixin, TransformerMixin): References ---------- - .. [1] `Distance Metric Learning for Large Margin Nearest Neighbor - Classification `_ - Kilian Q. Weinberger, John Blitzer, Lawrence K. Saul + .. [1] K. Q. Weinberger, J. Blitzer, L. K. Saul. `Distance Metric + Learning for Large Margin Nearest Neighbor Classification + `_. NIPS + 2005. """ def __init__(self, init=None, k=3, min_iter=50, max_iter=1000, diff --git a/metric_learn/lsml.py b/metric_learn/lsml.py index c4cdca97..5e84bf86 100644 --- a/metric_learn/lsml.py +++ b/metric_learn/lsml.py @@ -208,7 +208,7 @@ class LSML(_BaseLSML, _QuadrupletsClassifierMixin): Squared Residual `_. ICDM 2012. - .. [2] Adapted from https://gist.github.com/kcarnold/5439917 + .. 
[2] Code adapted from https://gist.github.com/kcarnold/5439917 See Also -------- diff --git a/metric_learn/mlkr.py b/metric_learn/mlkr.py index 3199b518..5fffee9b 100644 --- a/metric_learn/mlkr.py +++ b/metric_learn/mlkr.py @@ -73,15 +73,15 @@ class MLKR(MahalanobisMixin, TransformerMixin): :meth:`fit` and n_features_a must be less than or equal to that. If ``n_components`` is not None, n_features_a must match it. - A0: Not used. + A0 : Not used. .. deprecated:: 0.5.0 `A0` was deprecated in version 0.5.0 and will be removed in 0.6.0. Use 'init' instead. - tol: float, optional (default=None) + tol : float, optional (default=None) Convergence tolerance for the optimization. - max_iter: int, optional (default=1000) + max_iter : int, optional (default=1000) Cap on number of conjugate gradient iterations. verbose : bool, optional (default=False) @@ -118,9 +118,9 @@ class MLKR(MahalanobisMixin, TransformerMixin): References ---------- - .. [1] `Information-theoretic Metric Learning - `_ Jason V. Davis, et al. + .. [1] K.Q. Weinberger and G. Tesauro. `Metric Learning for Kernel + Regression `_. AISTATS 2007. """ def __init__(self, n_components=None, num_dims='deprecated', init=None, diff --git a/metric_learn/mmc.py b/metric_learn/mmc.py index 3769497e..3ef9c534 100644 --- a/metric_learn/mmc.py +++ b/metric_learn/mmc.py @@ -383,10 +383,6 @@ class MMC(_BaseMMC, _PairsClassifierMixin): An SPD matrix of shape (n_features, n_features), that will be used as such to initialize the metric. - preprocessor : array-like, shape=(n_samples, n_features) or callable - The preprocessor to call to get tuples from indices. If array-like, - tuples will be gotten like this: X[indices]. - A0 : Not used. .. deprecated:: 0.5.0 `A0` was deprecated in version 0.5.0 and will - be removed in 0.6.0. @@ -442,10 +438,11 @@ class MMC(_BaseMMC, _PairsClassifierMixin): References ---------- - .. [1] `Distance metric learning with application to clustering with - side-information `_ - Xing, Jordan, Russell, Ng. + ..
[1] Xing, Jordan, Russell, Ng. `Distance metric learning with application + to clustering with side-information + `_. + NIPS 2002. See Also -------- @@ -538,10 +535,6 @@ class MMC_Supervised(_BaseMMC, TransformerMixin): A numpy array of shape (n_features, n_features), that will be used as such to initialize the metric. - preprocessor : array-like, shape=(n_samples, n_features) or callable - The preprocessor to call to get tuples from indices. If array-like, - tuples will be gotten like this: X[indices]. - A0 : Not used. .. deprecated:: 0.5.0 `A0` was deprecated in version 0.5.0 and will diff --git a/metric_learn/nca.py b/metric_learn/nca.py index 983f1120..fbce5658 100644 --- a/metric_learn/nca.py +++ b/metric_learn/nca.py @@ -123,7 +123,7 @@ class NCA(MahalanobisMixin, TransformerMixin): .. [1] J. Goldberger, G. Hinton, S. Roweis, R. Salakhutdinov. `Neighbourhood Components Analysis `_. - Advances in Neural Information Processing Systems. 17, 513-520, 2005. + NIPS 2005. .. [2] Wikipedia entry on `Neighborhood Components Analysis `_ diff --git a/metric_learn/rca.py b/metric_learn/rca.py index f3a2ac89..32024a43 100644 --- a/metric_learn/rca.py +++ b/metric_learn/rca.py @@ -72,10 +72,10 @@ class RCA(MahalanobisMixin, TransformerMixin): References ------------------ - .. [1] `Adjustment learning and relevant component analysis - `_ Noam - Shental, et al. + .. [1] Noam Shental, et al. `Adjustment learning and relevant component + analysis `_ . + ECCV 2002. Attributes diff --git a/metric_learn/sdml.py b/metric_learn/sdml.py index 944739f2..38c50955 100644 --- a/metric_learn/sdml.py +++ b/metric_learn/sdml.py @@ -211,14 +211,12 @@ class SDML(_BaseSDML, _PairsClassifierMixin): References ---------- + .. [1] Qi et al. `An efficient sparse metric learning in high-dimensional + space via L1-penalized log-determinant regularization + `_. + ICML 2009. - .. [1] Qi et al. 
- An efficient sparse metric learning in high-dimensional space via - L1-penalized log-determinant regularization. ICML 2009. - http://lms.comp.nus.edu.sg/sites/default/files/publication\ --attachments/icml09-guojun.pdf - - .. [2] Adapted from https://gist.github.com/kcarnold/5439945 + .. [2] Code adapted from https://gist.github.com/kcarnold/5439945 """ def fit(self, pairs, y, calibration_params=None):