From 8679fdc91e266634843098a4352f2f0020558cb8 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Thu, 14 Oct 2021 13:16:56 +0200 Subject: [PATCH 01/14] Update API names, unuse depretaed html4 --- doc/conf.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 94263c7a..e0f77fc0 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -39,7 +39,7 @@ htmlhelp_basename = 'metric-learndoc' # Option to only need single backticks to refer to symbols -default_role = 'any' +#default_role = 'any' # Option to hide doctests comments in the documentation (like # doctest: # +NORMALIZE_WHITESPACE for instance) @@ -69,7 +69,7 @@ # Switch to old behavior with html4, for a good display of references, # as described in https://github.com/sphinx-doc/sphinx/issues/6705 -html4_writer = True +#html4_writer = True # Temporary work-around for spacing problem between parameter and parameter @@ -79,8 +79,8 @@ # In an ideal world, this would get fixed in this PR: # https://github.com/readthedocs/sphinx_rtd_theme/pull/747/files def setup(app): - app.add_javascript('js/copybutton.js') - app.add_stylesheet("basic.css") + app.add_js_file('js/copybutton.js') + app.add_css_file("basic.css") # Remove matplotlib agg warnings from generated doc when using plt.show From 8ea2c52fad2e174b7dd259d4f267230cd5549e57 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Thu, 14 Oct 2021 13:51:25 +0200 Subject: [PATCH 02/14] Fixes a lot of warning. Add Methods doctree --- doc/conf.py | 4 +- metric_learn/base_metric.py | 78 ++++++++++++++++++++++++++++++++++++- metric_learn/constraints.py | 25 +++++++++--- metric_learn/covariance.py | 5 +++ metric_learn/itml.py | 10 +++++ metric_learn/lfda.py | 5 +++ metric_learn/lmnn.py | 5 +++ metric_learn/lsml.py | 10 +++++ metric_learn/mlkr.py | 5 +++ metric_learn/mmc.py | 10 +++++ metric_learn/nca.py | 5 +++ metric_learn/rca.py | 11 ++++++ metric_learn/scml.py | 10 +++++ metric_learn/sdml.py | 10 +++++ 14 files changed, 183 insertions(+), 10 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index e0f77fc0..ef683330 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -39,7 +39,7 @@ htmlhelp_basename = 'metric-learndoc' # Option to only need single backticks to refer to symbols -#default_role = 'any' +# default_role = 'any' # Option to hide doctests comments in the documentation (like # doctest: # +NORMALIZE_WHITESPACE for instance) @@ -69,7 +69,7 @@ # Switch to old behavior with html4, for a good display of references, # as described in https://github.com/sphinx-doc/sphinx/issues/6705 -#html4_writer = True +# html4_writer = True # Temporary work-around for spacing problem between parameter and parameter diff --git a/metric_learn/base_metric.py b/metric_learn/base_metric.py index 21506011..3619e724 100644 --- a/metric_learn/base_metric.py +++ b/metric_learn/base_metric.py @@ -20,6 +20,15 @@ class BaseMetricLearner(BaseEstimator, metaclass=ABCMeta): preprocessor : array-like, shape=(n_samples, n_features) or callable The preprocessor to call to get tuples from indices. If array-like, tuples will be gotten like this: X[indices]. + + Methods + ------- + score_pairs: + Returns the score between pairs. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. """ def __init__(self, preprocessor=None): @@ -144,7 +153,15 @@ def get_metric(self): class MetricTransformer(metaclass=ABCMeta): + """ + Base class for all learners that can transform data into a new space + with the metric learned. 
+ Methods + ------- + transform: + Applies the metric transformation. + """ @abstractmethod def transform(self, X): """Applies the metric transformation. @@ -179,6 +196,21 @@ class MahalanobisMixin(BaseMetricLearner, MetricTransformer, ---------- components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. + + Methods + ------- + score_pairs: + Returns the learned Mahalanobis distance between pairs. + + transform: + Embeds data points in the learned linear embedding space. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. """ def score_pairs(self, pairs): @@ -305,6 +337,24 @@ class _PairsClassifierMixin(BaseMetricLearner): If the distance metric between two points is lower than this threshold, points will be classified as similar, otherwise they will be classified as dissimilar. + + Methods + ------- + predict: + Predicts the learned metric between input pairs. (For now it just + calls decision function). + + decision_function: + Returns the decision function used to classify the pairs. + + score: + Computes score of pairs similarity prediction. + + set_threshold: + Sets the threshold of the metric learner to the given value `threshold`. + + calibrate_threshold: + Decision threshold calibration for pairwise binary classification. """ _tuple_size = 2 # number of points in a tuple, 2 for pairs @@ -579,7 +629,19 @@ def _validate_calibration_params(strategy='accuracy', min_rate=None, class _TripletsClassifierMixin(BaseMetricLearner): - """Base class for triplets learners. + """ + Base class for triplets learners. + + Methods + ------- + predict: + Predicts the ordering between sample distances in input triplets. + + decision_function: + Predicts differences between sample distances in input triplets. + + score: + Computes score on input triplets. """ _tuple_size = 3 # number of points in a tuple, 3 for triplets @@ -663,7 +725,19 @@ def score(self, triplets): class _QuadrupletsClassifierMixin(BaseMetricLearner): - """Base class for quadruplets learners. + """ + Base class for quadruplets learners. + + Methods + ------- + predict: + Predicts the ordering between sample distances in input quadruplets. + + decision_function: + Predicts differences between sample distances in input quadruplets. + + score: + Computes score on input quadruplets. """ _tuple_size = 4 # number of points in a tuple, 4 for quadruplets diff --git a/metric_learn/constraints.py b/metric_learn/constraints.py index 2d86b819..da504178 100644 --- a/metric_learn/constraints.py +++ b/metric_learn/constraints.py @@ -25,6 +25,17 @@ class Constraints(object): ---------- partial_labels : `numpy.ndarray` of ints, shape=(n_samples,) Array of labels, with -1 indicating unknown label. + + Methods + ------- + positive_negative_pairs: + Generates positive pairs and negative pairs from labeled data. + + generate_knntriplets: + Generates triplets from labeled data. + + chunks: + Generates chunks from labeled data. """ def __init__(self, partial_labels): @@ -95,12 +106,14 @@ def generate_knntriplets(self, X, k_genuine, k_impostor): Parameters ---------- - X : (n x d) matrix - Input data, where each row corresponds to a single instance. - k_genuine : int - Number of neighbors of the same class to be taken into account. - k_impostor : int - Number of neighbors of different classes to be taken into account. 
+ X : (n x d) matrix + Input data, where each row corresponds to a single instance. + + k_genuine : int + Number of neighbors of the same class to be taken into account. + + k_impostor : int + Number of neighbors of different classes to be taken into account. Returns ------- diff --git a/metric_learn/covariance.py b/metric_learn/covariance.py index 3b218e6d..7625fbfd 100644 --- a/metric_learn/covariance.py +++ b/metric_learn/covariance.py @@ -27,6 +27,11 @@ class Covariance(MahalanobisMixin, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) + Methods + ------- + fit: + Calculates the covariance matrix of the input data. + Examples -------- >>> from metric_learn import Covariance diff --git a/metric_learn/itml.py b/metric_learn/itml.py index 43872b60..b0231b69 100644 --- a/metric_learn/itml.py +++ b/metric_learn/itml.py @@ -181,6 +181,11 @@ class ITML(_BaseITML, _PairsClassifierMixin): points will be classified as similar, otherwise they will be classified as dissimilar. + Methods + ------- + fit: + Learn the ITML model. + Examples -------- >>> from metric_learn import ITML @@ -321,6 +326,11 @@ class ITML_Supervised(_BaseITML, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) + Methods + ------- + fit: + Create constraints from labels and learn the ITML model. + Examples -------- >>> from metric_learn import ITML_Supervised diff --git a/metric_learn/lfda.py b/metric_learn/lfda.py index bfa3275e..3f306aa9 100644 --- a/metric_learn/lfda.py +++ b/metric_learn/lfda.py @@ -52,6 +52,11 @@ class LFDA(MahalanobisMixin, TransformerMixin): components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. + Methods + ------- + fit: + Fit the LFDA model. + Examples -------- diff --git a/metric_learn/lmnn.py b/metric_learn/lmnn.py index 8bdc4bf0..f6eb6f5c 100644 --- a/metric_learn/lmnn.py +++ b/metric_learn/lmnn.py @@ -107,6 +107,11 @@ class LMNN(MahalanobisMixin, TransformerMixin): components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. + Methods + ------- + fit: + Fit the LMNN model. + Examples -------- diff --git a/metric_learn/lsml.py b/metric_learn/lsml.py index 28f65ce7..cb3dbc50 100644 --- a/metric_learn/lsml.py +++ b/metric_learn/lsml.py @@ -171,6 +171,11 @@ class LSML(_BaseLSML, _QuadrupletsClassifierMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) + Methods + ------- + fit : + Learn the LSML model. + Examples -------- >>> from metric_learn import LSML @@ -282,6 +287,11 @@ class LSML_Supervised(_BaseLSML, TransformerMixin): prior. In any case, `random_state` is also used to randomly sample constraints from labels. + Methods + ------- + fit : + Create constraints from labels and learn the LSML model. + Examples -------- >>> from metric_learn import LSML_Supervised diff --git a/metric_learn/mlkr.py b/metric_learn/mlkr.py index 01d185e7..9d504ae2 100644 --- a/metric_learn/mlkr.py +++ b/metric_learn/mlkr.py @@ -90,6 +90,11 @@ class MLKR(MahalanobisMixin, TransformerMixin): components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. + Methods + ------- + fit: + Fit MLKR model. 
+ Examples -------- diff --git a/metric_learn/mmc.py b/metric_learn/mmc.py index 1ff30b1e..b4255c80 100644 --- a/metric_learn/mmc.py +++ b/metric_learn/mmc.py @@ -391,6 +391,11 @@ class MMC(_BaseMMC, _PairsClassifierMixin): points will be classified as similar, otherwise they will be classified as dissimilar. + Methods + ------- + fit: + Learn the MMC model. + Examples -------- >>> from metric_learn import MMC @@ -518,6 +523,11 @@ class MMC_Supervised(_BaseMMC, TransformerMixin): Mahalanobis matrix. In any case, `random_state` is also used to randomly sample constraints from labels. + Methods + ------- + fit: + Create constraints from labels and learn the MMC model. + Examples -------- >>> from metric_learn import MMC_Supervised diff --git a/metric_learn/nca.py b/metric_learn/nca.py index 7b4423d3..932661e0 100644 --- a/metric_learn/nca.py +++ b/metric_learn/nca.py @@ -90,6 +90,11 @@ class NCA(MahalanobisMixin, TransformerMixin): transformation. If ``init='pca'``, ``random_state`` is passed as an argument to PCA when initializing the transformation. + Methods + ------- + fit: + Fit NCA model. + Examples -------- diff --git a/metric_learn/rca.py b/metric_learn/rca.py index 34f7f3ff..a599caf9 100644 --- a/metric_learn/rca.py +++ b/metric_learn/rca.py @@ -46,6 +46,11 @@ class RCA(MahalanobisMixin, TransformerMixin): The preprocessor to call to get tuples from indices. If array-like, tuples will be formed like this: X[indices]. + Methods + ------- + fit: + Learn the RCA model. + Examples -------- >>> from metric_learn import RCA @@ -156,6 +161,12 @@ class RCA_Supervised(RCA): A pseudo random number generator object or a seed for it if int. It is used to randomly sample constraints from labels. + Methods + ------- + fit: + Create constraints from labels and learn the RCA model. + Needs num_constraints specified in constructor. + Examples -------- >>> from metric_learn import RCA_Supervised diff --git a/metric_learn/scml.py b/metric_learn/scml.py index c3fde272..c92193aa 100644 --- a/metric_learn/scml.py +++ b/metric_learn/scml.py @@ -361,6 +361,11 @@ class SCML(_BaseSCML, _TripletsClassifierMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `_components_from_basis_weights`.) + Methods + ------- + fit: + Learn the SCML model. + Examples -------- >>> from metric_learn import SCML @@ -471,6 +476,11 @@ class SCML_Supervised(_BaseSCML, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `_components_from_basis_weights`.) + Methods + ------- + fit: + Create constraints from labels and learn the SCML model. + Examples -------- >>> from metric_learn import SCML diff --git a/metric_learn/sdml.py b/metric_learn/sdml.py index a0736ffa..4f04e579 100644 --- a/metric_learn/sdml.py +++ b/metric_learn/sdml.py @@ -170,6 +170,11 @@ class SDML(_BaseSDML, _PairsClassifierMixin): points will be classified as similar, otherwise they will be classified as dissimilar. + Methods + ------- + fit: + Learn the SDML model. + Examples -------- >>> from metric_learn import SDML_Supervised @@ -285,6 +290,11 @@ class SDML_Supervised(_BaseSDML, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) + Methods + ------- + fit: + Create constraints from labels and learn the SDML model. 
+ See Also -------- metric_learn.SDML : The original weakly-supervised algorithm From 99ce4c5ce5060d29532817f022ccf397b01f2e58 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Thu, 14 Oct 2021 16:00:12 +0200 Subject: [PATCH 03/14] More warnings solved --- doc/supervised.rst | 11 +++++++++++ doc/unsupervised.rst | 2 ++ doc/weakly_supervised.rst | 23 +++++++++++++++++++---- examples/plot_metric_learning_examples.py | 6 +++--- metric_learn/scml.py | 8 ++++---- 5 files changed, 39 insertions(+), 11 deletions(-) diff --git a/doc/supervised.rst b/doc/supervised.rst index c6d8b68b..c8b1e005 100644 --- a/doc/supervised.rst +++ b/doc/supervised.rst @@ -142,6 +142,8 @@ indicates :math:`\mathbf{x}_{i}, \mathbf{x}_{j}` belong to different classes, .. topic:: Example Code: + A basic usage of this model with the Iris dataset from scikit-learn. + :: import numpy as np @@ -206,6 +208,8 @@ the sum of probability of being correctly classified: .. topic:: Example Code: + A basic usage of this model with the Iris dataset from scikit-learn. + :: import numpy as np @@ -279,6 +283,8 @@ same class are not imposed to be close. .. topic:: Example Code: + A basic usage of this model with the Iris dataset from scikit-learn. + :: import numpy as np @@ -353,6 +359,8 @@ calculating a weighted average of all the training samples: .. topic:: Example Code: + A basic usage of this model with the Iris dataset from scikit-learn. + :: from metric_learn import MLKR @@ -407,6 +415,9 @@ last points should be less similar than the two first points). .. topic:: Example Code: + A basic usage of supervied versions of weakly supervied algorithms + with the Iris dataset from scikit-learn. + :: from metric_learn import MMC_Supervised diff --git a/doc/unsupervised.rst b/doc/unsupervised.rst index 1191e805..50697599 100644 --- a/doc/unsupervised.rst +++ b/doc/unsupervised.rst @@ -22,6 +22,8 @@ Whitening_transformation>`_). .. topic:: Example Code: + A basic usage of this model with the Iris dataset from scikit-learn. + :: from metric_learn import Covariance diff --git a/doc/weakly_supervised.rst b/doc/weakly_supervised.rst index 174210b8..59dbb162 100644 --- a/doc/weakly_supervised.rst +++ b/doc/weakly_supervised.rst @@ -63,6 +63,7 @@ in a tuple (2 for pairs, 3 for triplets for instance), and `n_features` is the number of features of each point. .. topic:: Example: + Here is an artificial dataset of 4 pairs of 2 points of 3 features each: >>> import numpy as np @@ -94,7 +95,9 @@ would be to keep the dataset of points `X` aside, and just represent tuples as a collection of tuples of *indices* from the points in `X`. Since we loose the feature dimension there, the resulting array is 2D. -.. topic:: Example: An equivalent representation of the above pairs would be: +.. topic:: Example: + + An equivalent representation of the above pairs would be: >>> X = np.array([[-0.12, -1.21, -0.20], >>> [+0.05, -0.19, -0.05], @@ -402,6 +405,8 @@ is the prior distance metric, set to identity matrix by default, .. topic:: Example Code: + A basic usage of this model with pairs: + :: from metric_learn import ITML @@ -460,6 +465,8 @@ is the off-diagonal L1 norm. .. topic:: Example Code: + A basic usage of this model with pairs: + :: from metric_learn import SDML @@ -488,7 +495,7 @@ is the off-diagonal L1 norm. .. _rca: :py:class:`RCA ` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Relative Components Analysis (:py:class:`RCA `) @@ -514,6 +521,8 @@ as the Mahalanobis matrix. .. 
topic:: Example Code: + A basic usage of this model with pairs: + :: from metric_learn import RCA @@ -568,6 +577,8 @@ points, while constrains the sum of distances between dissimilar points: .. topic:: Example Code: + A basic usage of this model with pairs: + :: from metric_learn import MMC @@ -736,6 +747,8 @@ where :math:`[\cdot]_+` is the hinge loss. .. topic:: Example Code: + A basic usage of this model with triplets: + :: from metric_learn import SCML @@ -754,8 +767,8 @@ where :math:`[\cdot]_+` is the hinge loss. `_. \ (AAAI), 2014. - .. [2] Adapted from original \ - `Matlab implementation.`_. + .. [2] Adapted from original `Matlab implementation. \ + `_. .. _learning_on_quadruplets: @@ -929,6 +942,8 @@ by default, :math:`D_{ld}(\mathbf{\cdot, \cdot})` is the LogDet divergence: .. topic:: Example Code: + A basic usage of this model with quadruplets: + :: from metric_learn import LSML diff --git a/examples/plot_metric_learning_examples.py b/examples/plot_metric_learning_examples.py index 71229554..f00f838c 100644 --- a/examples/plot_metric_learning_examples.py +++ b/examples/plot_metric_learning_examples.py @@ -35,9 +35,9 @@ # We will be using a synthetic dataset to illustrate the plotting, # using the function `sklearn.datasets.make_classification` from # scikit-learn. The dataset will contain: -# - 100 points in 3 classes with 2 clusters per class -# - 5 features, among which 3 are informative (correlated with the class -# labels) and two are random noise with large magnitude +# - 100 points in 3 classes with 2 clusters per class +# - 5 features, among which 3 are informative (correlated with the class +# labels) and two are random noise with large magnitude X, y = make_classification(n_samples=100, n_classes=3, n_clusters_per_class=2, n_informative=3, class_sep=4., n_features=5, diff --git a/metric_learn/scml.py b/metric_learn/scml.py index c92193aa..86673603 100644 --- a/metric_learn/scml.py +++ b/metric_learn/scml.py @@ -382,8 +382,8 @@ class SCML(_BaseSCML, _TripletsClassifierMixin): `_. \ (AAAI), 2014. - .. [2] Adapted from original \ - `Matlab implementation.`_. + .. [2] Adapted from original `Matlab implementation. \ + `_. See Also -------- @@ -497,8 +497,8 @@ class SCML_Supervised(_BaseSCML, TransformerMixin): `_. \ (AAAI), 2014. - .. [2] Adapted from original \ - `Matlab implementation.`_. + .. [2] Adapted from original `Matlab implementation. \ + `_. 
See Also -------- From 2ec95f70e5cbd40a766b5cb9adc3428b4d38f9f0 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Thu, 14 Oct 2021 16:20:03 +0200 Subject: [PATCH 04/14] Fix docs dependencies --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 255671a2..23392077 100755 --- a/setup.py +++ b/setup.py @@ -68,7 +68,8 @@ 'scikit-learn>=0.21.3', ], extras_require=dict( - docs=['sphinx', 'shinx_rtd_theme', 'numpydoc'], + docs=['sphinx', 'sphinx_rtd_theme', 'numpydoc', 'sphinx-gallery', + 'matplotlib'], demo=['matplotlib'], sdml=['skggm>=0.2.9'] ), From fc584113b5e1a008bcd5600b1b2abb92de2f1ba9 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Fri, 29 Oct 2021 14:52:23 +0200 Subject: [PATCH 05/14] New style for Example Code and References --- doc/_static/css/styles.css | 23 ++++++++ doc/conf.py | 1 + doc/supervised.rst | 67 +++++++++++------------ doc/unsupervised.rst | 11 ++-- doc/weakly_supervised.rst | 102 +++++++++++++++++++----------------- metric_learn/base_metric.py | 2 +- metric_learn/itml.py | 2 +- metric_learn/lfda.py | 2 +- metric_learn/rca.py | 2 +- 9 files changed, 123 insertions(+), 89 deletions(-) create mode 100644 doc/_static/css/styles.css diff --git a/doc/_static/css/styles.css b/doc/_static/css/styles.css new file mode 100644 index 00000000..324dee6b --- /dev/null +++ b/doc/_static/css/styles.css @@ -0,0 +1,23 @@ +.hatnote { + border-color: #e1e4e5 ; + border-style: solid ; + border-width: 1px ; + font-size: x-small ; + font-style: italic ; + margin-left: auto ; + margin-right: auto ; + margin-bottom: 24px; + padding: 12px; +} +.hatnote-gray { + background-color: #f5f5f5 +} +.hatnote li { + list-style-type: square; + margin-left: 12px !important; +} +.hatnote ul { + list-style-type: square; + margin-left: 0px !important; + margin-bottom: 0px !important; +} \ No newline at end of file diff --git a/doc/conf.py b/doc/conf.py index ef683330..aa7ce031 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -81,6 +81,7 @@ def setup(app): app.add_js_file('js/copybutton.js') app.add_css_file("basic.css") + app.add_css_file("css/styles.css") # Remove matplotlib agg warnings from generated doc when using plt.show diff --git a/doc/supervised.rst b/doc/supervised.rst index 835983bb..acd687d3 100644 --- a/doc/supervised.rst +++ b/doc/supervised.rst @@ -152,9 +152,9 @@ neighbors (with same labels) of :math:`\mathbf{x}_{i}`, :math:`y_{ij}=0` indicates :math:`\mathbf{x}_{i}, \mathbf{x}_{j}` belong to different classes, :math:`[\cdot]_+=\max(0, \cdot)` is the Hinge loss. -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with the Iris dataset from scikit-learn. +A basic usage of this model with the Iris dataset from scikit-learn. :: @@ -169,15 +169,15 @@ indicates :math:`\mathbf{x}_{i}, \mathbf{x}_{j}` belong to different classes, lmnn = LMNN(k=5, learn_rate=1e-6) lmnn.fit(X, Y, verbose=False) -.. topic:: References: +.. rubric:: References - .. [1] Weinberger et al. `Distance Metric Learning for Large Margin - Nearest Neighbor Classification - `_. - JMLR 2009 - .. [2] `Wikipedia entry on Large Margin Nearest Neighbor `_ - +.. container:: hatnote hatnote-gray + + - Weinberger et al. `Distance Metric Learning for Large Margin Nearest Neighbor Classification `_. JMLR 2009. + + - `Wikipedia entry on Large Margin Nearest Neighbor `_. + .. _nca: @@ -218,9 +218,9 @@ the sum of probability of being correctly classified: \mathbf{L} = \text{argmax}\sum_i p_i -.. topic:: Example Code: +.. 
rubric:: Example Code - A basic usage of this model with the Iris dataset from scikit-learn. +A basic usage of this model with the Iris dataset from scikit-learn. :: @@ -235,13 +235,14 @@ the sum of probability of being correctly classified: nca = NCA(max_iter=1000) nca.fit(X, Y) -.. topic:: References: +.. rubric:: References + + +.. container:: hatnote hatnote-gray - .. [1] Goldberger et al. - `Neighbourhood Components Analysis `_. - NIPS 2005 + - Goldberger et al. `Neighbourhood Components Analysis `_. NIPS 2005. - .. [2] `Wikipedia entry on Neighborhood Components Analysis `_ + - `Wikipedia entry on Neighborhood Components Analysis `_. .. _lfda: @@ -293,9 +294,9 @@ nearby data pairs in the same class are made close and the data pairs in different classes are separated from each other; far apart data pairs in the same class are not imposed to be close. -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with the Iris dataset from scikit-learn. +A basic usage of this model with the Iris dataset from scikit-learn. :: @@ -315,15 +316,14 @@ same class are not imposed to be close. To work around this, fit instances of this class to data once, then keep the instance around to do transformations. -.. topic:: References: +.. rubric:: References - .. [1] Sugiyama. `Dimensionality Reduction of Multimodal Labeled Data by Local - Fisher Discriminant Analysis `_. - JMLR 2007 - .. [2] Tang. `Local Fisher Discriminant Analysis on Beer Style Clustering - `_. +.. container:: hatnote hatnote-gray + + - Sugiyama. `Dimensionality Reduction of Multimodal Labeled Data by Local Fisher Discriminant Analysis `_. JMLR 2007. + + - Tang. `Local Fisher Discriminant Analysis on Beer Style Clustering `_. .. _mlkr: @@ -369,9 +369,9 @@ calculating a weighted average of all the training samples: \hat{y}_i = \frac{\sum_{j\neq i}y_jk_{ij}}{\sum_{j\neq i}k_{ij}} -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with the Iris dataset from scikit-learn. +A basic usage of this model with the Iris dataset from scikit-learn. :: @@ -385,10 +385,12 @@ calculating a weighted average of all the training samples: mlkr = MLKR() mlkr.fit(X, Y) -.. topic:: References: +.. rubric:: References + + +.. container:: hatnote hatnote-gray - .. [1] Weinberger et al. `Metric Learning for Kernel Regression `_. AISTATS 2007 + - Weinberger et al. `Metric Learning for Kernel Regression `_. AISTATS 2007. .. _supervised_version: @@ -425,10 +427,9 @@ quadruplets, where for each quadruplet the two first points are from the same class, and the two last points are from a different class (so indeed the two last points should be less similar than the two first points). -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of supervied versions of weakly supervied algorithms - with the Iris dataset from scikit-learn. +A basic usage of supervied versions of weakly supervied algorithms with the Iris dataset from scikit-learn. :: diff --git a/doc/unsupervised.rst b/doc/unsupervised.rst index 50697599..62ebfae2 100644 --- a/doc/unsupervised.rst +++ b/doc/unsupervised.rst @@ -20,9 +20,9 @@ It can be used for ZCA whitening of the data (see the Wikipedia page of `whitening transformation `_). -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with the Iris dataset from scikit-learn. +A basic usage of this model with the Iris dataset from scikit-learn. :: @@ -34,6 +34,9 @@ Whitening_transformation>`_). cov = Covariance().fit(iris) x = cov.transform(iris) -.. 
topic:: References: +.. rubric:: References - .. [1] On the Generalized Distance in Statistics, P.C.Mahalanobis, 1936 \ No newline at end of file + +.. container:: hatnote hatnote-gray + + - On the Generalized Distance in Statistics, P.C.Mahalanobis, 1936. \ No newline at end of file diff --git a/doc/weakly_supervised.rst b/doc/weakly_supervised.rst index 2d8773c5..ae10b059 100644 --- a/doc/weakly_supervised.rst +++ b/doc/weakly_supervised.rst @@ -62,9 +62,9 @@ The most intuitive way to represent tuples is to provide the algorithm with a in a tuple (2 for pairs, 3 for triplets for instance), and `n_features` is the number of features of each point. -.. topic:: Example: +.. rubric:: Example Code - Here is an artificial dataset of 4 pairs of 2 points of 3 features each: +Here is an artificial dataset of 4 pairs of 2 points of 3 features each: >>> import numpy as np >>> tuples = np.array([[[-0.12, -1.21, -0.20], @@ -95,9 +95,9 @@ would be to keep the dataset of points `X` aside, and just represent tuples as a collection of tuples of *indices* from the points in `X`. Since we loose the feature dimension there, the resulting array is 2D. -.. topic:: Example: +.. rubric:: Example Code - An equivalent representation of the above pairs would be: +An equivalent representation of the above pairs would be: >>> X = np.array([[-0.12, -1.21, -0.20], >>> [+0.05, -0.19, -0.05], @@ -413,9 +413,9 @@ for similar and dissimilar pairs respectively, and :math:`\mathbf{M}_0` is the prior distance metric, set to identity matrix by default, :math:`D_{\ell \mathrm{d}}(\cdot)` is the log determinant. -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with pairs: +A basic usage of this model with pairs: :: @@ -434,11 +434,14 @@ is the prior distance metric, set to identity matrix by default, itml = ITML() itml.fit(pairs, y) -.. topic:: References: +.. rubric:: References - .. [1] Jason V. Davis, et al. `Information-theoretic Metric Learning `_. ICML 2007 - .. [2] Adapted from Matlab code at http://www.cs.utexas.edu/users/pjain/itml/ +.. container:: hatnote hatnote-gray + + - Jason V. Davis, et al. `Information-theoretic Metric Learning `_. ICML 2007. + + - Adapted from Matlab code at http://www.cs.utexas.edu/users/pjain/itml/ . .. _sdml: @@ -473,9 +476,9 @@ the sums of the row elements of :math:`\mathbf{K}`., :math:`||\cdot||_{1, off}` is the off-diagonal L1 norm. -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with pairs: +A basic usage of this model with pairs: :: @@ -493,14 +496,14 @@ is the off-diagonal L1 norm. sdml = SDML() sdml.fit(pairs, y) -.. topic:: References: +.. rubric:: References + + +.. container:: hatnote hatnote-gray - .. [1] Qi et al. - `An efficient sparse metric learning in high-dimensional space via - L1-penalized log-determinant regularization `_. - ICML 2009. + - Qi et al. `An efficient sparse metric learning in high-dimensional space via L1-penalized log-determinant regularization `_. ICML 2009. - .. [2] Code adapted from https://gist.github.com/kcarnold/5439945 + - Code adapted from https://gist.github.com/kcarnold/5439945 . .. _rca: @@ -529,9 +532,9 @@ where chunklet :math:`j` consists of :math:`\{\mathbf{x}_{ji}\}_{i=1}^{n_j}` with a mean :math:`\hat{m}_j`. The inverse of :math:`\mathbf{C}^{-1}` is used as the Mahalanobis matrix. -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with pairs: +A basic usage of this model with pairs: :: @@ -546,15 +549,16 @@ as the Mahalanobis matrix. 
rca = RCA() rca.fit(X, chunks) -.. topic:: References: +.. rubric:: References - .. [1] Shental et al. `Adjustment learning and relevant component analysis - `_. ECCV 2002 - .. [2] Bar-Hillel et al. `Learning distance functions using equivalence relations `_. ICML 2003 +.. container:: hatnote hatnote-gray - .. [3] Bar-Hillel et al. `Learning a Mahalanobis metric from equivalence constraints `_. JMLR 2005 + - Shental et al. `Adjustment learning and relevant component analysis `_. ECCV 2002. + + - Bar-Hillel et al. `Learning distance functions using equivalence relations `_. ICML 2003. + + - Bar-Hillel et al. `Learning a Mahalanobis metric from equivalence constraints `_. JMLR 2005. .. _mmc: @@ -585,9 +589,9 @@ points, while constrains the sum of distances between dissimilar points: \qquad \qquad \text{s.t.} \qquad \sum_{(\mathbf{x}_i, \mathbf{x}_j) \in D} d^2_{\mathbf{M}}(\mathbf{x}_i, \mathbf{x}_j) \geq 1 -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with pairs: +A basic usage of this model with pairs: :: @@ -605,13 +609,14 @@ points, while constrains the sum of distances between dissimilar points: mmc = MMC() mmc.fit(pairs, y) -.. topic:: References: +.. rubric:: References + - .. [1] Xing et al. `Distance metric learning with application to clustering with - side-information `_. NIPS 2002 - .. [2] Adapted from Matlab code http://www.cs.cmu.edu/%7Eepxing/papers/Old_papers/code_Metric_online.tar.gz +.. container:: hatnote hatnote-gray + + - Xing et al. `Distance metric learning with application to clustering with side-information `_. NIPS 2002. + + - Adapted from Matlab code http://www.cs.cmu.edu/%7Eepxing/papers/Old_papers/code_Metric_online.tar.gz . .. _learning_on_triplets: @@ -755,9 +760,9 @@ is added to yield a sparse combination. The formulation is the following: where :math:`[\cdot]_+` is the hinge loss. -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with triplets: +A basic usage of this model with triplets: :: @@ -771,14 +776,14 @@ where :math:`[\cdot]_+` is the hinge loss. scml = SCML() scml.fit(triplets) -.. topic:: References: +.. rubric:: References + - .. [1] Y. Shi, A. Bellet and F. Sha. `Sparse Compositional Metric Learning. - `_. \ - (AAAI), 2014. +.. container:: hatnote hatnote-gray - .. [2] Adapted from original `Matlab implementation. \ - `_. + - Y. Shi, A. Bellet and F. Sha. `Sparse Compositional Metric Learning. `_. (AAAI), 2014. + + - Adapted from original `Matlab implementation. `_. .. _learning_on_quadruplets: @@ -950,9 +955,9 @@ by default, :math:`D_{ld}(\mathbf{\cdot, \cdot})` is the LogDet divergence: D_{ld}(\mathbf{M, M_0}) = \text{tr}(\mathbf{MM_0}) − \text{logdet} (\mathbf{M}) -.. topic:: Example Code: +.. rubric:: Example Code - A basic usage of this model with quadruplets: +A basic usage of this model with quadruplets: :: @@ -969,12 +974,13 @@ by default, :math:`D_{ld}(\mathbf{\cdot, \cdot})` is the LogDet divergence: lsml = LSML() lsml.fit(quadruplets) -.. topic:: References: +.. rubric:: References + + +.. container:: hatnote hatnote-gray - .. [1] Liu et al. - `Metric Learning from Relative Comparisons by Minimizing Squared - Residual `_. ICDM 2012 + - Liu et al. `Metric Learning from Relative Comparisons by Minimizing Squared Residual `_. ICDM 2012. - .. [2] Code adapted from https://gist.github.com/kcarnold/5439917 + - Code adapted from https://gist.github.com/kcarnold/5439917 . 
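A minimal sketch of how the pairs-learner methods documented throughout this series fit together, using only calls named in the docstrings above (``fit``, ``calibrate_threshold``, ``predict``, ``get_metric``); the toy pair values are made up for illustration and default ITML parameters are assumed::

    import numpy as np
    from metric_learn import ITML

    # Four toy pairs of 2-D points: y = +1 marks a similar pair,
    # y = -1 a dissimilar one (values are illustrative only).
    pairs = [[[1.2, 7.5], [1.3, 1.5]],
             [[6.4, 2.6], [6.2, 9.7]],
             [[1.3, 4.5], [3.2, 4.6]],
             [[6.2, 5.5], [5.4, 5.4]]]
    y = [1, 1, -1, -1]

    itml = ITML()
    itml.fit(pairs, y)             # learn the Mahalanobis metric from the pairs

    # Calibrate the similar/dissimilar decision threshold; held-out validation
    # pairs are the intended input, the training pairs are reused here only to
    # keep the sketch self-contained.
    itml.calibrate_threshold(pairs, y)
    print(itml.predict(pairs))     # +1 / -1 prediction for each input pair

    # get_metric() returns a function that takes two 1-D arrays and outputs the
    # learned metric score, usable outside the estimator.
    metric = itml.get_metric()
    u, v = np.array(pairs[0][0]), np.array(pairs[0][1])
    print(metric(u, v))

This exercises exactly the methods listed in the new "Methods" sections of the pairs classifiers; nothing beyond what those docstrings name is assumed.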
diff --git a/metric_learn/base_metric.py b/metric_learn/base_metric.py index 57a69348..37186389 100644 --- a/metric_learn/base_metric.py +++ b/metric_learn/base_metric.py @@ -676,7 +676,7 @@ def calibrate_threshold(self, pairs_valid, y_valid, strategy='accuracy', evaluation tool in clinical medicine, MH Zweig, G Campbell - Clinical chemistry, 1993 - .. [2] most of the code of this function is from scikit-learn's PR #10117 + .. [2] Most of the code of this function is from scikit-learn's PR #10117 See Also -------- diff --git a/metric_learn/itml.py b/metric_learn/itml.py index fb915294..c3a83439 100644 --- a/metric_learn/itml.py +++ b/metric_learn/itml.py @@ -203,7 +203,7 @@ class ITML(_BaseITML, _PairsClassifierMixin): ---------- .. [1] Jason V. Davis, et al. `Information-theoretic Metric Learning `_. ICML 2007. + /DavisKJSD07_ICML.pdf>`_. ICML 2007. """ def fit(self, pairs, y, bounds=None, calibration_params=None): diff --git a/metric_learn/lfda.py b/metric_learn/lfda.py index 3f306aa9..a6954332 100644 --- a/metric_learn/lfda.py +++ b/metric_learn/lfda.py @@ -70,7 +70,7 @@ class LFDA(MahalanobisMixin, TransformerMixin): >>> lfda.fit(X, Y) References - ------------------ + ---------- .. [1] Masashi Sugiyama. `Dimensionality Reduction of Multimodal Labeled Data by Local Fisher Discriminant Analysis `_. JMLR 2007. diff --git a/metric_learn/rca.py b/metric_learn/rca.py index 9a27ed5d..360e458a 100644 --- a/metric_learn/rca.py +++ b/metric_learn/rca.py @@ -63,7 +63,7 @@ class RCA(MahalanobisMixin, TransformerMixin): >>> rca.fit(X, chunks) References - ------------------ + ---------- .. [1] Noam Shental, et al. `Adjustment learning and relevant component analysis `_ . From 11477457ad02fa9929ab760a193a2ffe83828d35 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Tue, 2 Nov 2021 13:48:02 +0100 Subject: [PATCH 06/14] Add all Methods to all classes in docstrings, in alphabetical order --- metric_learn/base_metric.py | 125 +++++++++++++++++++++++++++++------- metric_learn/constraints.py | 8 +-- metric_learn/covariance.py | 32 +++++++++ metric_learn/itml.py | 69 ++++++++++++++++++++ metric_learn/lfda.py | 28 ++++++++ metric_learn/lmnn.py | 28 ++++++++ metric_learn/lsml.py | 64 +++++++++++++++++- metric_learn/mlkr.py | 28 ++++++++ metric_learn/mmc.py | 69 ++++++++++++++++++++ metric_learn/nca.py | 28 ++++++++ metric_learn/rca.py | 56 ++++++++++++++++ metric_learn/scml.py | 62 ++++++++++++++++++ metric_learn/sdml.py | 69 ++++++++++++++++++++ 13 files changed, 639 insertions(+), 27 deletions(-) diff --git a/metric_learn/base_metric.py b/metric_learn/base_metric.py index 37186389..d53050e4 100644 --- a/metric_learn/base_metric.py +++ b/metric_learn/base_metric.py @@ -24,12 +24,24 @@ class BaseMetricLearner(BaseEstimator, metaclass=ABCMeta): Methods ------- - score_pairs: - Returns the score between pairs. - get_metric: Returns a function that takes as input two 1D arrays and outputs the learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the score between pairs. + + set_params: + Set the parameters of this estimator. """ def __init__(self, preprocessor=None): @@ -280,18 +292,30 @@ class MahalanobisMixin(BaseMetricLearner, MetricTransformer, Methods ------- - score_pairs: - Returns the learned Mahalanobis distance between pairs. 
- - transform: - Embeds data points in the learned linear embedding space. + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. get_metric: Returns a function that takes as input two 1D arrays and outputs the learned metric score on these two points. - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. """ def score_pairs(self, pairs): @@ -500,21 +524,40 @@ class _PairsClassifierMixin(BaseMetricLearner): Methods ------- - predict: - Predicts the learned metric between input pairs. (For now it just - calls decision function). + calibrate_threshold: + Decision threshold calibration for pairwise binary classification.\ decision_function: Returns the decision function used to classify the pairs. + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the learned metric between input pairs. (For now it just + calls decision function). + score: Computes score of pairs similarity prediction. + score_pairs: + Deprecated. Returns the score between pairs. + + set_params: + Set the parameters of this estimator. + set_threshold: Sets the threshold of the metric learner to the given value `threshold`. - - calibrate_threshold: - Decision threshold calibration for pairwise binary classification. """ _tuple_size = 2 # number of points in a tuple, 2 for pairs @@ -794,14 +837,33 @@ class _TripletsClassifierMixin(BaseMetricLearner): Methods ------- - predict: - Predicts the ordering between sample distances in input triplets. - decision_function: Predicts differences between sample distances in input triplets. + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the ordering between sample distances in input triplets. + score: Computes score on input triplets. + + score_pairs: + Deprecated. Returns the score between pairs. + + set_params: + Set the parameters of this estimator. """ _tuple_size = 3 # number of points in a tuple, 3 for triplets @@ -890,14 +952,33 @@ class _QuadrupletsClassifierMixin(BaseMetricLearner): Methods ------- - predict: - Predicts the ordering between sample distances in input quadruplets. - decision_function: Predicts differences between sample distances in input quadruplets. + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. 
+ + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the ordering between sample distances in input quadruplets. + score: Computes score on input quadruplets. + + score_pairs: + Deprecated. Returns the score between pairs. + + set_params: + Set the parameters of this estimator. """ _tuple_size = 4 # number of points in a tuple, 4 for quadruplets diff --git a/metric_learn/constraints.py b/metric_learn/constraints.py index da504178..2b619a57 100644 --- a/metric_learn/constraints.py +++ b/metric_learn/constraints.py @@ -28,14 +28,14 @@ class Constraints(object): Methods ------- - positive_negative_pairs: - Generates positive pairs and negative pairs from labeled data. + chunks: + Generates chunks from labeled data. generate_knntriplets: Generates triplets from labeled data. - chunks: - Generates chunks from labeled data. + positive_negative_pairs: + Generates positive pairs and negative pairs from labeled data. """ def __init__(self, partial_labels): diff --git a/metric_learn/covariance.py b/metric_learn/covariance.py index 7625fbfd..510eab70 100644 --- a/metric_learn/covariance.py +++ b/metric_learn/covariance.py @@ -32,6 +32,34 @@ class Covariance(MahalanobisMixin, TransformerMixin): fit: Calculates the covariance matrix of the input data. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import Covariance @@ -47,6 +75,10 @@ def __init__(self, preprocessor=None): def fit(self, X, y=None): """ + Calculates the covariance matrix of the input data. + + Parameters + ---------- X : data matrix, (n x d) y : unused """ diff --git a/metric_learn/itml.py b/metric_learn/itml.py index c3a83439..f0a4c7cf 100644 --- a/metric_learn/itml.py +++ b/metric_learn/itml.py @@ -183,9 +183,50 @@ class ITML(_BaseITML, _PairsClassifierMixin): Methods ------- + calibrate_threshold: + Decision threshold calibration for pairwise binary classification. + + decision_function: + Returns the decision function used to classify the pairs. + fit: Learn the ITML model. + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the learned metric between input pairs. (For now it just + calls decision function). + + score: + Computes score of pairs similarity prediction. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + set_threshold: + Sets the threshold of the metric learner to the given value `threshold`. 
+ + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import ITML @@ -331,6 +372,34 @@ class ITML_Supervised(_BaseITML, TransformerMixin): fit: Create constraints from labels and learn the ITML model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import ITML_Supervised diff --git a/metric_learn/lfda.py b/metric_learn/lfda.py index a6954332..0c76e774 100644 --- a/metric_learn/lfda.py +++ b/metric_learn/lfda.py @@ -57,6 +57,34 @@ class LFDA(MahalanobisMixin, TransformerMixin): fit: Fit the LFDA model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- diff --git a/metric_learn/lmnn.py b/metric_learn/lmnn.py index f6eb6f5c..51657915 100644 --- a/metric_learn/lmnn.py +++ b/metric_learn/lmnn.py @@ -112,6 +112,34 @@ class LMNN(MahalanobisMixin, TransformerMixin): fit: Fit the LMNN model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- diff --git a/metric_learn/lsml.py b/metric_learn/lsml.py index cb3dbc50..ffd18b49 100644 --- a/metric_learn/lsml.py +++ b/metric_learn/lsml.py @@ -173,9 +173,43 @@ class LSML(_BaseLSML, _QuadrupletsClassifierMixin): Methods ------- - fit : + decision_function: + Predicts differences between sample distances in input quadruplets. + + fit: Learn the LSML model. + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. 
+ + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the ordering between sample distances in input quadruplets. + + score: + Computes score on input quadruplets. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import LSML @@ -292,6 +326,34 @@ class LSML_Supervised(_BaseLSML, TransformerMixin): fit : Create constraints from labels and learn the LSML model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import LSML_Supervised diff --git a/metric_learn/mlkr.py b/metric_learn/mlkr.py index 9d504ae2..68c31d51 100644 --- a/metric_learn/mlkr.py +++ b/metric_learn/mlkr.py @@ -95,6 +95,34 @@ class MLKR(MahalanobisMixin, TransformerMixin): fit: Fit MLKR model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- diff --git a/metric_learn/mmc.py b/metric_learn/mmc.py index b4255c80..ed6e26d7 100644 --- a/metric_learn/mmc.py +++ b/metric_learn/mmc.py @@ -393,9 +393,50 @@ class MMC(_BaseMMC, _PairsClassifierMixin): Methods ------- + calibrate_threshold: + Decision threshold calibration for pairwise binary classification. + + decision_function: + Returns the decision function used to classify the pairs. + fit: Learn the MMC model. + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the learned metric between input pairs. 
(For now it just + calls decision function). + + score: + Computes score of pairs similarity prediction. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + set_threshold: + Sets the threshold of the metric learner to the given value `threshold`. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import MMC @@ -528,6 +569,34 @@ class MMC_Supervised(_BaseMMC, TransformerMixin): fit: Create constraints from labels and learn the MMC model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import MMC_Supervised diff --git a/metric_learn/nca.py b/metric_learn/nca.py index 932661e0..44d52660 100644 --- a/metric_learn/nca.py +++ b/metric_learn/nca.py @@ -95,6 +95,34 @@ class NCA(MahalanobisMixin, TransformerMixin): fit: Fit NCA model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- diff --git a/metric_learn/rca.py b/metric_learn/rca.py index 360e458a..a4ed7286 100644 --- a/metric_learn/rca.py +++ b/metric_learn/rca.py @@ -51,6 +51,34 @@ class RCA(MahalanobisMixin, TransformerMixin): fit: Learn the RCA model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import RCA @@ -167,6 +195,34 @@ class RCA_Supervised(RCA): Create constraints from labels and learn the RCA model. Needs num_constraints specified in constructor. + fit_transform: + Fit to data, then transform it. 
+ + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import RCA_Supervised diff --git a/metric_learn/scml.py b/metric_learn/scml.py index d35edcf1..020e8531 100644 --- a/metric_learn/scml.py +++ b/metric_learn/scml.py @@ -363,9 +363,43 @@ class SCML(_BaseSCML, _TripletsClassifierMixin): Methods ------- + decision_function: + Predicts differences between sample distances in input triplets. + fit: Learn the SCML model. + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the ordering between sample distances in input triplets. + + score: + Computes score on input triplets. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import SCML @@ -481,6 +515,34 @@ class SCML_Supervised(_BaseSCML, TransformerMixin): fit: Create constraints from labels and learn the SCML model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import SCML_Supervised diff --git a/metric_learn/sdml.py b/metric_learn/sdml.py index 4f04e579..b2ff3cd6 100644 --- a/metric_learn/sdml.py +++ b/metric_learn/sdml.py @@ -172,9 +172,50 @@ class SDML(_BaseSDML, _PairsClassifierMixin): Methods ------- + calibrate_threshold: + Decision threshold calibration for pairwise binary classification. + + decision_function: + Returns the decision function used to classify the pairs. + fit: Learn the SDML model. + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. 
+ + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + predict: + Predicts the learned metric between input pairs. (For now it just + calls decision function). + + score: + Computes score of pairs similarity prediction. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + set_threshold: + Sets the threshold of the metric learner to the given value `threshold`. + + transform: + Embeds data points in the learned linear embedding space. + Examples -------- >>> from metric_learn import SDML_Supervised @@ -295,6 +336,34 @@ class SDML_Supervised(_BaseSDML, TransformerMixin): fit: Create constraints from labels and learn the SDML model. + fit_transform: + Fit to data, then transform it. + + get_mahalanobis_matrix: + Returns a copy of the Mahalanobis matrix learned by the metric learner. + + get_metric: + Returns a function that takes as input two 1D arrays and outputs the + learned metric score on these two points. + + get_params: + Get parameters for this estimator. + + pair_distance: + Returns the (pseudo) distance between pairs, when available. + + pair_score: + Returns the similarity score between pairs of points. + + score_pairs: + Deprecated. Returns the learned Mahalanobis distance between pairs. + + set_params: + Set the parameters of this estimator. + + transform: + Embeds data points in the learned linear embedding space. + See Also -------- metric_learn.SDML : The original weakly-supervised algorithm From 4937e050c8dbed1faabbeb9911c0ddb55664d5ec Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Tue, 2 Nov 2021 13:48:29 +0100 Subject: [PATCH 07/14] Add MetricTransformer and MahalanobisMixin to auto-docs --- doc/metric_learn.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/metric_learn.rst b/doc/metric_learn.rst index 8f91d91c..4d0676b9 100644 --- a/doc/metric_learn.rst +++ b/doc/metric_learn.rst @@ -13,6 +13,8 @@ Base Classes metric_learn.Constraints metric_learn.base_metric.BaseMetricLearner + metric_learn.base_metric.MetricTransformer + metric_learn.base_metric.MahalanobisMixin metric_learn.base_metric._PairsClassifierMixin metric_learn.base_metric._TripletsClassifierMixin metric_learn.base_metric._QuadrupletsClassifierMixin From c85515d26e323345a55650b0229a504bd13946bd Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Thu, 4 Nov 2021 11:32:06 +0100 Subject: [PATCH 08/14] Delete unused vars in docs. Use simple quotes --- doc/conf.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index aa7ce031..0166cc58 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -38,9 +38,6 @@ html_static_path = ['_static'] htmlhelp_basename = 'metric-learndoc' -# Option to only need single backticks to refer to symbols -# default_role = 'any' - # Option to hide doctests comments in the documentation (like # doctest: # +NORMALIZE_WHITESPACE for instance) trim_doctest_flags = True @@ -67,11 +64,6 @@ # generate autosummary even if no references autosummary_generate = True -# Switch to old behavior with html4, for a good display of references, -# as described in https://github.com/sphinx-doc/sphinx/issues/6705 -# html4_writer = True - - # Temporary work-around for spacing problem between parameter and parameter # type in the doc, see https://github.com/numpy/numpydoc/issues/215. 
The bug # has been fixed in sphinx (https://github.com/sphinx-doc/sphinx/pull/5976) but @@ -80,8 +72,7 @@ # https://github.com/readthedocs/sphinx_rtd_theme/pull/747/files def setup(app): app.add_js_file('js/copybutton.js') - app.add_css_file("basic.css") - app.add_css_file("css/styles.css") + app.add_css_file('css/styles.css') # Remove matplotlib agg warnings from generated doc when using plt.show From da7838c2501771d626c228247ac50de1e4d58377 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Thu, 4 Nov 2021 12:48:48 +0100 Subject: [PATCH 09/14] Fix identation --- doc/conf.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 0166cc58..5eb312dc 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -64,6 +64,7 @@ # generate autosummary even if no references autosummary_generate = True + # Temporary work-around for spacing problem between parameter and parameter # type in the doc, see https://github.com/numpy/numpydoc/issues/215. The bug # has been fixed in sphinx (https://github.com/sphinx-doc/sphinx/pull/5976) but @@ -71,8 +72,8 @@ # In an ideal world, this would get fixed in this PR: # https://github.com/readthedocs/sphinx_rtd_theme/pull/747/files def setup(app): - app.add_js_file('js/copybutton.js') - app.add_css_file('css/styles.css') + app.add_js_file('js/copybutton.js') + app.add_css_file('css/styles.css') # Remove matplotlib agg warnings from generated doc when using plt.show From 72e4c8686c84d4ec8630139c817c88249fb1b4cc Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Tue, 9 Nov 2021 16:18:02 +0100 Subject: [PATCH 10/14] Fix Github CI instead of old Travis CI --- doc/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/index.rst b/doc/index.rst index 6ec4fb26..f9dfd83d 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -1,6 +1,6 @@ metric-learn: Metric Learning in Python ======================================= -|Travis-CI Build Status| |License| |PyPI version| |Code coverage| +|GitHub Actions Build Status| |License| |PyPI version| |Code coverage| `metric-learn `_ contains efficient Python implementations of several popular supervised and @@ -57,8 +57,8 @@ Documentation outline :ref:`genindex` | :ref:`search` -.. |Travis-CI Build Status| image:: https://api.travis-ci.org/scikit-learn-contrib/metric-learn.svg?branch=master - :target: https://travis-ci.org/scikit-learn-contrib/metric-learn +.. |GitHub Actions Build Status| image:: https://github.com/scikit-learn-contrib/metric-learn/workflows/CI/badge.svg + :target: https://github.com/scikit-learn-contrib/metric-learn/actions?query=event%3Apush+branch%3Amaster .. |PyPI version| image:: https://badge.fury.io/py/metric-learn.svg :target: http://badge.fury.io/py/metric-learn .. |License| image:: http://img.shields.io/:license-mit-blue.svg?style=flat From e53153b5a6334993ad2e590ffd59eb89aeda7293 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Tue, 9 Nov 2021 16:32:00 +0100 Subject: [PATCH 11/14] References Lists are now numbered --- doc/supervised.rst | 14 +++++++------- doc/unsupervised.rst | 2 +- doc/weakly_supervised.rst | 26 +++++++++++++------------- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/doc/supervised.rst b/doc/supervised.rst index acd687d3..e042ee1d 100644 --- a/doc/supervised.rst +++ b/doc/supervised.rst @@ -174,9 +174,9 @@ A basic usage of this model with the Iris dataset from scikit-learn. .. container:: hatnote hatnote-gray - - Weinberger et al. 
`Distance Metric Learning for Large Margin Nearest Neighbor Classification `_. JMLR 2009. + [1]. Weinberger et al. `Distance Metric Learning for Large Margin Nearest Neighbor Classification `_. JMLR 2009. - - `Wikipedia entry on Large Margin Nearest Neighbor `_. + [2]. `Wikipedia entry on Large Margin Nearest Neighbor `_. .. _nca: @@ -240,9 +240,9 @@ A basic usage of this model with the Iris dataset from scikit-learn. .. container:: hatnote hatnote-gray - - Goldberger et al. `Neighbourhood Components Analysis `_. NIPS 2005. + [1]. Goldberger et al. `Neighbourhood Components Analysis `_. NIPS 2005. - - `Wikipedia entry on Neighborhood Components Analysis `_. + [2]. `Wikipedia entry on Neighborhood Components Analysis `_. .. _lfda: @@ -321,9 +321,9 @@ A basic usage of this model with the Iris dataset from scikit-learn. .. container:: hatnote hatnote-gray - - Sugiyama. `Dimensionality Reduction of Multimodal Labeled Data by Local Fisher Discriminant Analysis `_. JMLR 2007. + [1]. Sugiyama. `Dimensionality Reduction of Multimodal Labeled Data by Local Fisher Discriminant Analysis `_. JMLR 2007. - - Tang. `Local Fisher Discriminant Analysis on Beer Style Clustering `_. + [2]. Tang. `Local Fisher Discriminant Analysis on Beer Style Clustering `_. .. _mlkr: @@ -390,7 +390,7 @@ A basic usage of this model with the Iris dataset from scikit-learn. .. container:: hatnote hatnote-gray - - Weinberger et al. `Metric Learning for Kernel Regression `_. AISTATS 2007. + [1]. Weinberger et al. `Metric Learning for Kernel Regression `_. AISTATS 2007. .. _supervised_version: diff --git a/doc/unsupervised.rst b/doc/unsupervised.rst index 62ebfae2..b28155c7 100644 --- a/doc/unsupervised.rst +++ b/doc/unsupervised.rst @@ -39,4 +39,4 @@ A basic usage of this model with the Iris dataset from scikit-learn. .. container:: hatnote hatnote-gray - - On the Generalized Distance in Statistics, P.C.Mahalanobis, 1936. \ No newline at end of file + [1]. On the Generalized Distance in Statistics, P.C.Mahalanobis, 1936. \ No newline at end of file diff --git a/doc/weakly_supervised.rst b/doc/weakly_supervised.rst index ae10b059..b5d0526e 100644 --- a/doc/weakly_supervised.rst +++ b/doc/weakly_supervised.rst @@ -439,9 +439,9 @@ A basic usage of this model with pairs: .. container:: hatnote hatnote-gray - - Jason V. Davis, et al. `Information-theoretic Metric Learning `_. ICML 2007. + [1]. Jason V. Davis, et al. `Information-theoretic Metric Learning `_. ICML 2007. - - Adapted from Matlab code at http://www.cs.utexas.edu/users/pjain/itml/ . + [2]. Adapted from Matlab code at http://www.cs.utexas.edu/users/pjain/itml/ . .. _sdml: @@ -501,9 +501,9 @@ A basic usage of this model with pairs: .. container:: hatnote hatnote-gray - - Qi et al. `An efficient sparse metric learning in high-dimensional space via L1-penalized log-determinant regularization `_. ICML 2009. + [1]. Qi et al. `An efficient sparse metric learning in high-dimensional space via L1-penalized log-determinant regularization `_. ICML 2009. - - Code adapted from https://gist.github.com/kcarnold/5439945 . + [2]. Code adapted from https://gist.github.com/kcarnold/5439945 . .. _rca: @@ -554,11 +554,11 @@ A basic usage of this model with pairs: .. container:: hatnote hatnote-gray - - Shental et al. `Adjustment learning and relevant component analysis `_. ECCV 2002. + [1]. Shental et al. `Adjustment learning and relevant component analysis `_. ECCV 2002. - - Bar-Hillel et al. `Learning distance functions using equivalence relations `_. ICML 2003. + [2]. Bar-Hillel et al. 
`Learning distance functions using equivalence relations `_. ICML 2003. - - Bar-Hillel et al. `Learning a Mahalanobis metric from equivalence constraints `_. JMLR 2005. + [3]. Bar-Hillel et al. `Learning a Mahalanobis metric from equivalence constraints `_. JMLR 2005. .. _mmc: @@ -614,9 +614,9 @@ A basic usage of this model with pairs: .. container:: hatnote hatnote-gray - - Xing et al. `Distance metric learning with application to clustering with side-information `_. NIPS 2002. + [1]. Xing et al. `Distance metric learning with application to clustering with side-information `_. NIPS 2002. - - Adapted from Matlab code http://www.cs.cmu.edu/%7Eepxing/papers/Old_papers/code_Metric_online.tar.gz . + [2]. Adapted from Matlab code http://www.cs.cmu.edu/%7Eepxing/papers/Old_papers/code_Metric_online.tar.gz . .. _learning_on_triplets: @@ -781,9 +781,9 @@ A basic usage of this model with triplets: .. container:: hatnote hatnote-gray - - Y. Shi, A. Bellet and F. Sha. `Sparse Compositional Metric Learning. `_. (AAAI), 2014. + [1]. Y. Shi, A. Bellet and F. Sha. `Sparse Compositional Metric Learning. `_. (AAAI), 2014. - - Adapted from original `Matlab implementation. `_. + [2]. Adapted from original `Matlab implementation. `_. .. _learning_on_quadruplets: @@ -979,8 +979,8 @@ A basic usage of this model with quadruplets: .. container:: hatnote hatnote-gray - - Liu et al. `Metric Learning from Relative Comparisons by Minimizing Squared Residual `_. ICDM 2012. + [1]. Liu et al. `Metric Learning from Relative Comparisons by Minimizing Squared Residual `_. ICDM 2012. - - Code adapted from https://gist.github.com/kcarnold/5439917 . + [2]. Code adapted from https://gist.github.com/kcarnold/5439917 . From 5b081f863d40d21869aae897f8df3a6641081e68 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Tue, 9 Nov 2021 16:52:58 +0100 Subject: [PATCH 12/14] RemoveExample Code body almost everywhere --- doc/supervised.rst | 10 ---------- doc/unsupervised.rst | 2 -- doc/weakly_supervised.rst | 12 ------------ 3 files changed, 24 deletions(-) diff --git a/doc/supervised.rst b/doc/supervised.rst index e042ee1d..09077dc2 100644 --- a/doc/supervised.rst +++ b/doc/supervised.rst @@ -154,8 +154,6 @@ indicates :math:`\mathbf{x}_{i}, \mathbf{x}_{j}` belong to different classes, .. rubric:: Example Code -A basic usage of this model with the Iris dataset from scikit-learn. - :: import numpy as np @@ -220,8 +218,6 @@ the sum of probability of being correctly classified: .. rubric:: Example Code -A basic usage of this model with the Iris dataset from scikit-learn. - :: import numpy as np @@ -296,8 +292,6 @@ same class are not imposed to be close. .. rubric:: Example Code -A basic usage of this model with the Iris dataset from scikit-learn. - :: import numpy as np @@ -371,8 +365,6 @@ calculating a weighted average of all the training samples: .. rubric:: Example Code -A basic usage of this model with the Iris dataset from scikit-learn. - :: from metric_learn import MLKR @@ -429,8 +421,6 @@ last points should be less similar than the two first points). .. rubric:: Example Code -A basic usage of supervied versions of weakly supervied algorithms with the Iris dataset from scikit-learn. - :: from metric_learn import MMC_Supervised diff --git a/doc/unsupervised.rst b/doc/unsupervised.rst index b28155c7..110b07f9 100644 --- a/doc/unsupervised.rst +++ b/doc/unsupervised.rst @@ -22,8 +22,6 @@ Whitening_transformation>`_). .. rubric:: Example Code -A basic usage of this model with the Iris dataset from scikit-learn. 
- :: from metric_learn import Covariance diff --git a/doc/weakly_supervised.rst b/doc/weakly_supervised.rst index b5d0526e..341d959a 100644 --- a/doc/weakly_supervised.rst +++ b/doc/weakly_supervised.rst @@ -415,8 +415,6 @@ is the prior distance metric, set to identity matrix by default, .. rubric:: Example Code -A basic usage of this model with pairs: - :: from metric_learn import ITML @@ -478,8 +476,6 @@ is the off-diagonal L1 norm. .. rubric:: Example Code -A basic usage of this model with pairs: - :: from metric_learn import SDML @@ -534,8 +530,6 @@ as the Mahalanobis matrix. .. rubric:: Example Code -A basic usage of this model with pairs: - :: from metric_learn import RCA @@ -591,8 +585,6 @@ points, while constrains the sum of distances between dissimilar points: .. rubric:: Example Code -A basic usage of this model with pairs: - :: from metric_learn import MMC @@ -762,8 +754,6 @@ where :math:`[\cdot]_+` is the hinge loss. .. rubric:: Example Code -A basic usage of this model with triplets: - :: from metric_learn import SCML @@ -957,8 +947,6 @@ by default, :math:`D_{ld}(\mathbf{\cdot, \cdot})` is the LogDet divergence: .. rubric:: Example Code -A basic usage of this model with quadruplets: - :: from metric_learn import LSML From ec5e996ab6cc204213034a6f2f34a2339588a8c7 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Wed, 10 Nov 2021 13:10:55 +0100 Subject: [PATCH 13/14] Removed Methods directive. Kept warnings --- metric_learn/base_metric.py | 150 ------------------------------------ metric_learn/constraints.py | 11 --- metric_learn/covariance.py | 33 -------- metric_learn/itml.py | 79 ------------------- metric_learn/lfda.py | 33 -------- metric_learn/lmnn.py | 33 -------- metric_learn/lsml.py | 72 ----------------- metric_learn/mlkr.py | 33 -------- metric_learn/mmc.py | 79 ------------------- metric_learn/nca.py | 33 -------- metric_learn/rca.py | 67 ---------------- metric_learn/scml.py | 72 ----------------- metric_learn/sdml.py | 79 ------------------- 13 files changed, 774 deletions(-) diff --git a/metric_learn/base_metric.py b/metric_learn/base_metric.py index 15391d96..82ef5681 100644 --- a/metric_learn/base_metric.py +++ b/metric_learn/base_metric.py @@ -21,27 +21,6 @@ class BaseMetricLearner(BaseEstimator, metaclass=ABCMeta): preprocessor : array-like, shape=(n_samples, n_features) or callable The preprocessor to call to get tuples from indices. If array-like, tuples will be gotten like this: X[indices]. - - Methods - ------- - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the score between pairs. - - set_params: - Set the parameters of this estimator. """ def __init__(self, preprocessor=None): @@ -249,11 +228,6 @@ class MetricTransformer(metaclass=ABCMeta): """ Base class for all learners that can transform data into a new space with the metric learned. - - Methods - ------- - transform: - Applies the metric transformation. """ @abstractmethod def transform(self, X): @@ -289,33 +263,6 @@ class MahalanobisMixin(BaseMetricLearner, MetricTransformer, ---------- components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. 
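To make the role of ``components_`` concrete: for a fitted Mahalanobis learner, the matrix returned by ``get_mahalanobis_matrix`` should equal ``L.T @ L``, and the metric returned by ``get_metric`` should equal the Euclidean distance after applying ``L``. A minimal sketch (``Covariance`` and the Iris data are used here only as a convenient stand-in; any fitted learner from the package is expected to behave the same way):

::

  import numpy as np
  from sklearn.datasets import load_iris
  from metric_learn import Covariance

  X, _ = load_iris(return_X_y=True)
  learner = Covariance().fit(X)

  L = learner.components_                 # the learned linear transformation L
  M = learner.get_mahalanobis_matrix()    # expected to match L.T @ L
  print(np.allclose(M, L.T @ L))

  d = learner.get_metric()                # callable taking two 1D arrays
  print(np.isclose(d(X[0], X[1]), np.linalg.norm(L @ X[0] - L @ X[1])))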
- - Methods - ------- - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. """ def score_pairs(self, pairs): @@ -521,43 +468,6 @@ class _PairsClassifierMixin(BaseMetricLearner): If the distance metric between two points is lower than this threshold, points will be classified as similar, otherwise they will be classified as dissimilar. - - Methods - ------- - calibrate_threshold: - Decision threshold calibration for pairwise binary classification.\ - - decision_function: - Returns the decision function used to classify the pairs. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the learned metric between input pairs. (For now it just - calls decision function). - - score: - Computes score of pairs similarity prediction. - - score_pairs: - Deprecated. Returns the score between pairs. - - set_params: - Set the parameters of this estimator. - - set_threshold: - Sets the threshold of the metric learner to the given value `threshold`. """ _tuple_size = 2 # number of points in a tuple, 2 for pairs @@ -840,36 +750,6 @@ def _validate_calibration_params(strategy='accuracy', min_rate=None, class _TripletsClassifierMixin(BaseMetricLearner): """ Base class for triplets learners. - - Methods - ------- - decision_function: - Predicts differences between sample distances in input triplets. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the ordering between sample distances in input triplets. - - score: - Computes score on input triplets. - - score_pairs: - Deprecated. Returns the score between pairs. - - set_params: - Set the parameters of this estimator. """ _tuple_size = 3 # number of points in a tuple, 3 for triplets @@ -955,36 +835,6 @@ def score(self, triplets): class _QuadrupletsClassifierMixin(BaseMetricLearner): """ Base class for quadruplets learners. - - Methods - ------- - decision_function: - Predicts differences between sample distances in input quadruplets. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the ordering between sample distances in input quadruplets. 
- - score: - Computes score on input quadruplets. - - score_pairs: - Deprecated. Returns the score between pairs. - - set_params: - Set the parameters of this estimator. """ _tuple_size = 4 # number of points in a tuple, 4 for quadruplets diff --git a/metric_learn/constraints.py b/metric_learn/constraints.py index 2b619a57..68e205f6 100644 --- a/metric_learn/constraints.py +++ b/metric_learn/constraints.py @@ -25,17 +25,6 @@ class Constraints(object): ---------- partial_labels : `numpy.ndarray` of ints, shape=(n_samples,) Array of labels, with -1 indicating unknown label. - - Methods - ------- - chunks: - Generates chunks from labeled data. - - generate_knntriplets: - Generates triplets from labeled data. - - positive_negative_pairs: - Generates positive pairs and negative pairs from labeled data. """ def __init__(self, partial_labels): diff --git a/metric_learn/covariance.py b/metric_learn/covariance.py index 510eab70..2c05b28d 100644 --- a/metric_learn/covariance.py +++ b/metric_learn/covariance.py @@ -27,39 +27,6 @@ class Covariance(MahalanobisMixin, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) - Methods - ------- - fit: - Calculates the covariance matrix of the input data. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import Covariance diff --git a/metric_learn/itml.py b/metric_learn/itml.py index f0a4c7cf..af87f530 100644 --- a/metric_learn/itml.py +++ b/metric_learn/itml.py @@ -181,52 +181,6 @@ class ITML(_BaseITML, _PairsClassifierMixin): points will be classified as similar, otherwise they will be classified as dissimilar. - Methods - ------- - calibrate_threshold: - Decision threshold calibration for pairwise binary classification. - - decision_function: - Returns the decision function used to classify the pairs. - - fit: - Learn the ITML model. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the learned metric between input pairs. (For now it just - calls decision function). - - score: - Computes score of pairs similarity prediction. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - set_threshold: - Sets the threshold of the metric learner to the given value `threshold`. - - transform: - Embeds data points in the learned linear embedding space. 
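A rough sketch of how the pair-based methods above (``fit``, ``decision_function``, ``predict``) are typically called on ``ITML``; the pairs and labels below are made-up toy values, not a meaningful dataset:

::

  import numpy as np
  from metric_learn import ITML

  # four labelled pairs of 3-D points: +1 = similar, -1 = dissimilar
  pairs = np.array([[[1.2, 7.5, 1.3], [1.1, 7.3, 1.5]],
                    [[4.3, 6.7, 2.1], [4.5, 6.2, 2.6]],
                    [[2.1, 1.9, 7.3], [6.7, 3.4, 0.1]],
                    [[6.7, 3.9, 0.2], [1.3, 1.5, 8.9]]])
  y = np.array([1, 1, -1, -1])

  itml = ITML().fit(pairs, y)
  print(itml.decision_function(pairs))  # higher score -> predicted more similar
  print(itml.predict(pairs))            # +1 / -1 at the current threshold

On a held-out set of labelled pairs, ``calibrate_threshold`` can then be used to pick the decision threshold instead of keeping the value set at fit time.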
- Examples -------- >>> from metric_learn import ITML @@ -367,39 +321,6 @@ class ITML_Supervised(_BaseITML, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) - Methods - ------- - fit: - Create constraints from labels and learn the ITML model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import ITML_Supervised diff --git a/metric_learn/lfda.py b/metric_learn/lfda.py index 0c76e774..82ae20eb 100644 --- a/metric_learn/lfda.py +++ b/metric_learn/lfda.py @@ -52,39 +52,6 @@ class LFDA(MahalanobisMixin, TransformerMixin): components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. - Methods - ------- - fit: - Fit the LFDA model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- diff --git a/metric_learn/lmnn.py b/metric_learn/lmnn.py index 51657915..8bdc4bf0 100644 --- a/metric_learn/lmnn.py +++ b/metric_learn/lmnn.py @@ -107,39 +107,6 @@ class LMNN(MahalanobisMixin, TransformerMixin): components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. - Methods - ------- - fit: - Fit the LMNN model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. 
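For the supervised learners such as ``LMNN`` the same interface reduces to ``fit(X, y)`` plus ``transform`` and ``get_metric``; a minimal sketch on Iris with default hyper-parameters (no tuning implied):

::

  from sklearn.datasets import load_iris
  from sklearn.neighbors import KNeighborsClassifier
  from metric_learn import LMNN

  X, y = load_iris(return_X_y=True)
  lmnn = LMNN().fit(X, y)

  X_embedded = lmnn.transform(X)   # data mapped into the learned space
  # the learned metric can be handed to scikit-learn estimators as a callable
  knn = KNeighborsClassifier(metric=lmnn.get_metric()).fit(X, y)
  print(knn.score(X, y))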
- Examples -------- diff --git a/metric_learn/lsml.py b/metric_learn/lsml.py index ffd18b49..28f65ce7 100644 --- a/metric_learn/lsml.py +++ b/metric_learn/lsml.py @@ -171,45 +171,6 @@ class LSML(_BaseLSML, _QuadrupletsClassifierMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) - Methods - ------- - decision_function: - Predicts differences between sample distances in input quadruplets. - - fit: - Learn the LSML model. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the ordering between sample distances in input quadruplets. - - score: - Computes score on input quadruplets. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import LSML @@ -321,39 +282,6 @@ class LSML_Supervised(_BaseLSML, TransformerMixin): prior. In any case, `random_state` is also used to randomly sample constraints from labels. - Methods - ------- - fit : - Create constraints from labels and learn the LSML model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import LSML_Supervised diff --git a/metric_learn/mlkr.py b/metric_learn/mlkr.py index 68c31d51..01d185e7 100644 --- a/metric_learn/mlkr.py +++ b/metric_learn/mlkr.py @@ -90,39 +90,6 @@ class MLKR(MahalanobisMixin, TransformerMixin): components_ : `numpy.ndarray`, shape=(n_components, n_features) The learned linear transformation ``L``. - Methods - ------- - fit: - Fit MLKR model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. 
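``MLKR`` follows the same pattern but expects a continuous target; a quick sketch on synthetic data (the ``make_regression`` setup below is arbitrary):

::

  from sklearn.datasets import make_regression
  from metric_learn import MLKR

  X, y = make_regression(n_samples=100, n_features=5, noise=0.1,
                         random_state=42)
  mlkr = MLKR().fit(X, y)

  X_embedded = mlkr.transform(X)   # embedding tuned for kernel regression of y
  print(X_embedded.shape)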
- Examples -------- diff --git a/metric_learn/mmc.py b/metric_learn/mmc.py index ed6e26d7..1ff30b1e 100644 --- a/metric_learn/mmc.py +++ b/metric_learn/mmc.py @@ -391,52 +391,6 @@ class MMC(_BaseMMC, _PairsClassifierMixin): points will be classified as similar, otherwise they will be classified as dissimilar. - Methods - ------- - calibrate_threshold: - Decision threshold calibration for pairwise binary classification. - - decision_function: - Returns the decision function used to classify the pairs. - - fit: - Learn the MMC model. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the learned metric between input pairs. (For now it just - calls decision function). - - score: - Computes score of pairs similarity prediction. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - set_threshold: - Sets the threshold of the metric learner to the given value `threshold`. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import MMC @@ -564,39 +518,6 @@ class MMC_Supervised(_BaseMMC, TransformerMixin): Mahalanobis matrix. In any case, `random_state` is also used to randomly sample constraints from labels. - Methods - ------- - fit: - Create constraints from labels and learn the MMC model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import MMC_Supervised diff --git a/metric_learn/nca.py b/metric_learn/nca.py index 44d52660..7b4423d3 100644 --- a/metric_learn/nca.py +++ b/metric_learn/nca.py @@ -90,39 +90,6 @@ class NCA(MahalanobisMixin, TransformerMixin): transformation. If ``init='pca'``, ``random_state`` is passed as an argument to PCA when initializing the transformation. - Methods - ------- - fit: - Fit NCA model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. 
- - transform: - Embeds data points in the learned linear embedding space. - Examples -------- diff --git a/metric_learn/rca.py b/metric_learn/rca.py index a4ed7286..a63aa1d8 100644 --- a/metric_learn/rca.py +++ b/metric_learn/rca.py @@ -46,39 +46,6 @@ class RCA(MahalanobisMixin, TransformerMixin): The preprocessor to call to get tuples from indices. If array-like, tuples will be formed like this: X[indices]. - Methods - ------- - fit: - Learn the RCA model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import RCA @@ -189,40 +156,6 @@ class RCA_Supervised(RCA): A pseudo random number generator object or a seed for it if int. It is used to randomly sample constraints from labels. - Methods - ------- - fit: - Create constraints from labels and learn the RCA model. - Needs num_constraints specified in constructor. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import RCA_Supervised diff --git a/metric_learn/scml.py b/metric_learn/scml.py index 020e8531..2bdd0d57 100644 --- a/metric_learn/scml.py +++ b/metric_learn/scml.py @@ -361,45 +361,6 @@ class SCML(_BaseSCML, _TripletsClassifierMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `_components_from_basis_weights`.) - Methods - ------- - decision_function: - Predicts differences between sample distances in input triplets. - - fit: - Learn the SCML model. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the ordering between sample distances in input triplets. - - score: - Computes score on input triplets. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. 
- - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import SCML @@ -510,39 +471,6 @@ class SCML_Supervised(_BaseSCML, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `_components_from_basis_weights`.) - Methods - ------- - fit: - Create constraints from labels and learn the SCML model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import SCML_Supervised diff --git a/metric_learn/sdml.py b/metric_learn/sdml.py index b2ff3cd6..a0736ffa 100644 --- a/metric_learn/sdml.py +++ b/metric_learn/sdml.py @@ -170,52 +170,6 @@ class SDML(_BaseSDML, _PairsClassifierMixin): points will be classified as similar, otherwise they will be classified as dissimilar. - Methods - ------- - calibrate_threshold: - Decision threshold calibration for pairwise binary classification. - - decision_function: - Returns the decision function used to classify the pairs. - - fit: - Learn the SDML model. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - predict: - Predicts the learned metric between input pairs. (For now it just - calls decision function). - - score: - Computes score of pairs similarity prediction. - - score_pairs: - Deprecated. Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - set_threshold: - Sets the threshold of the metric learner to the given value `threshold`. - - transform: - Embeds data points in the learned linear embedding space. - Examples -------- >>> from metric_learn import SDML_Supervised @@ -331,39 +285,6 @@ class SDML_Supervised(_BaseSDML, TransformerMixin): The linear transformation ``L`` deduced from the learned Mahalanobis metric (See function `components_from_metric`.) - Methods - ------- - fit: - Create constraints from labels and learn the SDML model. - - fit_transform: - Fit to data, then transform it. - - get_mahalanobis_matrix: - Returns a copy of the Mahalanobis matrix learned by the metric learner. - - get_metric: - Returns a function that takes as input two 1D arrays and outputs the - learned metric score on these two points. - - get_params: - Get parameters for this estimator. - - pair_distance: - Returns the (pseudo) distance between pairs, when available. - - pair_score: - Returns the similarity score between pairs of points. - - score_pairs: - Deprecated. 
Returns the learned Mahalanobis distance between pairs. - - set_params: - Set the parameters of this estimator. - - transform: - Embeds data points in the learned linear embedding space. - See Also -------- metric_learn.SDML : The original weakly-supervised algorithm From 407486ed54a65dee5fe0aad9b52c3ddc38509da0 Mon Sep 17 00:00:00 2001 From: mvargas33 Date: Wed, 10 Nov 2021 13:48:20 +0100 Subject: [PATCH 14/14] Deprecated directive now is red as in sklearn --- doc/_static/css/styles.css | 13 +++++++++++++ metric_learn/base_metric.py | 25 +++++++++++++------------ 2 files changed, 26 insertions(+), 12 deletions(-) diff --git a/doc/_static/css/styles.css b/doc/_static/css/styles.css index 324dee6b..6d350ae4 100644 --- a/doc/_static/css/styles.css +++ b/doc/_static/css/styles.css @@ -20,4 +20,17 @@ list-style-type: square; margin-left: 0px !important; margin-bottom: 0px !important; +} +.deprecated { + color: #b94a48; + background-color: #F3E5E5; + border-color: #eed3d7; + margin-top: 0.5rem; + padding: 0.5rem; + border-radius: 0.5rem; + margin-bottom: 0.5rem; +} + +.deprecated p { + margin-bottom: 0 !important; } \ No newline at end of file diff --git a/metric_learn/base_metric.py b/metric_learn/base_metric.py index 82ef5681..7b449c8e 100644 --- a/metric_learn/base_metric.py +++ b/metric_learn/base_metric.py @@ -29,16 +29,17 @@ def __init__(self, preprocessor=None): @abstractmethod def score_pairs(self, pairs): """ - .. deprecated:: 0.7.0 Refer to `pair_distance` and `pair_score`. + Returns the score between pairs + (can be a similarity, or a distance/metric depending on the algorithm) + + .. deprecated:: 0.7.0 + Refer to `pair_distance` and `pair_score`. .. warning:: This method will be removed in 0.8.0. Please refer to `pair_distance` or `pair_score`. This change will occur in order to add learners that don't necessarily learn a Mahalanobis distance. - Returns the score between pairs - (can be a similarity, or a distance/metric depending on the algorithm) - Parameters ---------- pairs : array-like, shape=(n_pairs, 2, n_features) or (n_pairs, 2) @@ -267,14 +268,6 @@ class MahalanobisMixin(BaseMetricLearner, MetricTransformer, def score_pairs(self, pairs): r""" - .. deprecated:: 0.7.0 - This method is deprecated. Please use `pair_distance` instead. - - .. warning:: - This method will be removed in 0.8.0. Please refer to `pair_distance` - or `pair_score`. This change will occur in order to add learners - that don't necessarily learn a Mahalanobis distance. - Returns the learned Mahalanobis distance between pairs. This distance is defined as: :math:`d_M(x, x') = \\sqrt{(x-x')^T M (x-x')}` @@ -285,6 +278,14 @@ def score_pairs(self, pairs): x_e')^T (x_e- x_e')}`, with :math:`x_e = L x` (See :class:`MahalanobisMixin`). + .. deprecated:: 0.7.0 + Please use `pair_distance` instead. + + .. warning:: + This method will be removed in 0.8.0. Please refer to `pair_distance` + or `pair_score`. This change will occur in order to add learners + that don't necessarily learn a Mahalanobis distance. + Parameters ---------- pairs : array-like, shape=(n_pairs, 2, n_features) or (n_pairs, 2)