
Commit c378e4b

[MRG] Correct a few small doc issues following #280 (#281)
* add docstrings for constraints class, pairs and chunks methods
* fix missing optional values and descriptions, uniformize
* fix indentation problems in docstring and uniformize
* fix more small things
* cosmit
* remove unnecessary line
* missing blank line for pep8
1 parent 833e186 commit c378e4b

13 files changed, +87 -71 lines changed

doc/conf.py

Lines changed: 11 additions & 0 deletions
@@ -69,3 +69,14 @@
 # Switch to old behavior with html4, for a good display of references,
 # as described in https://github.com/sphinx-doc/sphinx/issues/6705
 html4_writer = True
+
+
+# Temporary work-around for spacing problem between parameter and parameter
+# type in the doc, see https://github.com/numpy/numpydoc/issues/215. The bug
+# has been fixed in sphinx (https://github.com/sphinx-doc/sphinx/pull/5976) but
+# through a change in sphinx basic.css except rtd_theme does not use basic.css.
+# In an ideal world, this would get fixed in this PR:
+# https://github.com/readthedocs/sphinx_rtd_theme/pull/747/files
+def setup(app):
+  app.add_javascript('js/copybutton.js')
+  app.add_stylesheet("basic.css")

doc/weakly_supervised.rst

Lines changed: 2 additions & 2 deletions
@@ -483,7 +483,7 @@ is the off-diagonal L1 norm.
   L1-penalized log-determinant regularization <https://icml.cc/Conferences/2009/papers/46.pdf>`_.
   ICML 2009.

-.. [2] Adapted from https://gist.github.com/kcarnold/5439945
+.. [2] Code adapted from https://gist.github.com/kcarnold/5439945

 .. _rca:

@@ -893,6 +893,6 @@ by default, :math:`D_{ld}(\mathbf{\cdot, \cdot})` is the LogDet divergence:
   `Metric Learning from Relative Comparisons by Minimizing Squared
   Residual <http://www.cs.ucla.edu/~weiwang/paper/ICDM12.pdf>`_. ICDM 2012

-.. [2] Adapted from https://gist.github.com/kcarnold/5439917
+.. [2] Code adapted from https://gist.github.com/kcarnold/5439917


metric_learn/base_metric.py

Lines changed: 7 additions & 7 deletions
@@ -39,7 +39,7 @@ def score_pairs(self, pairs):

 Returns
 -------
-scores: `numpy.ndarray` of shape=(n_pairs,)
+scores : `numpy.ndarray` of shape=(n_pairs,)
   The score of every pair.

 See Also
@@ -69,27 +69,27 @@ def _prepare_inputs(self, X, y=None, type_of_inputs='classic',

 Parameters
 ----------
-input: array-like
+X : array-like
   The input data array to check.

 y : array-like
   The input labels array to check.

-type_of_inputs: `str` {'classic', 'tuples'}
+type_of_inputs : `str` {'classic', 'tuples'}
   The type of inputs to check. If 'classic', the input should be
   a 2D array-like of points or a 1D array like of indicators of points. If
   'tuples', the input should be a 3D array-like of tuples or a 2D
   array-like of indicators of tuples.

-**kwargs: dict
+**kwargs : dict
   Arguments to pass to check_input.

 Returns
 -------
 X : `numpy.ndarray`
   The checked input data array.

-y: `numpy.ndarray` (optional)
+y : `numpy.ndarray` (optional)
   The checked input labels array.
 """
 self._check_preprocessor()
@@ -203,7 +203,7 @@ def score_pairs(self, pairs):

 Returns
 -------
-scores: `numpy.ndarray` of shape=(n_pairs,)
+scores : `numpy.ndarray` of shape=(n_pairs,)
   The learned Mahalanobis distance for every pair.

 See Also
@@ -271,7 +271,7 @@ def metric_fun(u, v, squared=False):

 Returns
 -------
-distance: float
+distance : float
   The distance between u and v according to the new metric.
 """
 u = validate_vector(u)
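For readers skimming this diff, a minimal usage sketch of the `score_pairs` API these docstrings describe (not part of the commit; the toy data and the choice of NCA as the learner are illustrative assumptions):

# Minimal sketch: fit any Mahalanobis learner, then score pairs of points.
import numpy as np
from metric_learn import NCA

X = np.array([[0., 0.], [1., 0.], [3., 3.], [4., 3.]])
y = np.array([0, 0, 1, 1])

nca = NCA(max_iter=100)
nca.fit(X, y)

# `pairs` is a 3D array of shape (n_pairs, 2, n_features); score_pairs
# returns one score per pair (for Mahalanobis learners, the learned
# distance) as an array of shape (n_pairs,).
pairs = np.array([[[0., 0.], [1., 0.]],
                  [[0., 0.], [4., 3.]]])
print(nca.score_pairs(pairs))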

metric_learn/constraints.py

Lines changed: 16 additions & 13 deletions
@@ -20,12 +20,12 @@ class Constraints(object):
 Parameters
 ----------
 partial_labels : `numpy.ndarray` of ints, shape=(n_samples,)
-  Array of labels, with -1 indicating unknown label.
+  Array of labels, with -1 indicating unknown label.

 Attributes
 ----------
 partial_labels : `numpy.ndarray` of ints, shape=(n_samples,)
-  Array of labels, with -1 indicating unknown label.
+  Array of labels, with -1 indicating unknown label.
 """

 def __init__(self, partial_labels):
@@ -46,26 +46,29 @@ def positive_negative_pairs(self, num_constraints, same_length=False,

 Parameters
 ----------
-num_constraints : int
-  Number of positive and negative constraints to generate.
-same_length : bool, optional (default=False)
-  If True, forces the number of positive and negative pairs to be
-  equal by ignoring some pairs from the larger set.
-random_state : int or numpy.RandomState or None, optional (default=None)
-  A pseudo random number generator object or a seed for it if int.
+num_constraints : int
+  Number of positive and negative constraints to generate.
+
+same_length : bool, optional (default=False)
+  If True, forces the number of positive and negative pairs to be
+  equal by ignoring some pairs from the larger set.
+
+random_state : int or numpy.RandomState or None, optional (default=None)
+  A pseudo random number generator object or a seed for it if int.
+
 Returns
 -------
 a : array-like, shape=(n_constraints,)
-  1D array of indicators for the left elements of positive pairs.
+  1D array of indicators for the left elements of positive pairs.

 b : array-like, shape=(n_constraints,)
-  1D array of indicators for the right elements of positive pairs.
+  1D array of indicators for the right elements of positive pairs.

 c : array-like, shape=(n_constraints,)
-  1D array of indicators for the left elements of negative pairs.
+  1D array of indicators for the left elements of negative pairs.

 d : array-like, shape=(n_constraints,)
-  1D array of indicators for the right elements of negative pairs.
+  1D array of indicators for the right elements of negative pairs.
 """
 random_state = check_random_state(random_state)
 a, b = self._pairs(num_constraints, same_label=True,
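Since this is the first full docstring for `Constraints.positive_negative_pairs`, a short usage sketch may help (not part of the commit; the toy partial labels are made up):

# Minimal sketch of the API documented above: -1 marks unlabeled points.
import numpy as np
from metric_learn.constraints import Constraints

partial_labels = np.array([0, 0, 0, 1, 1, 1, 2, 2, -1, -1])
constraints = Constraints(partial_labels)

# a[i], b[i] index a same-label (positive) pair; c[i], d[i] a
# different-label (negative) pair. `same_length=True` truncates the
# larger of the two sets so both have the same number of pairs.
a, b, c, d = constraints.positive_negative_pairs(num_constraints=3,
                                                 same_length=True,
                                                 random_state=42)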

metric_learn/itml.py

Lines changed: 6 additions & 3 deletions
@@ -211,9 +211,9 @@ class ITML(_BaseITML, _PairsClassifierMixin):

 References
 ----------
-.. [1] `Information-theoretic Metric Learning
+.. [1] Jason V. Davis, et al. `Information-theoretic Metric Learning
   <http://www.prateekjain.org/publications/all_papers\
-  /DavisKJSD07_ICML.pdf>`_ Jason V. Davis, et al.
+  /DavisKJSD07_ICML.pdf>`_. ICML 2007.
 """

 def fit(self, pairs, y, bounds=None, calibration_params=None):
@@ -229,8 +229,10 @@ def fit(self, pairs, y, bounds=None, calibration_params=None):
   3D Array of pairs with each row corresponding to two points,
   or 2D array of indices of pairs if the metric learner uses a
   preprocessor.
+
 y: array-like, of shape (n_constraints,)
   Labels of constraints. Should be -1 for dissimilar pair, 1 for similar.
+
 bounds : array-like of two numbers
   Bounds on similarity, aside slack variables, s.t.
   ``d(a, b) < bounds_[0]`` for all given pairs of similar points ``a``
@@ -239,6 +241,7 @@
   If not provided at initialization, bounds_[0] and bounds_[1] will be
   set to the 5th and 95th percentile of the pairwise distances among all
   points present in the input `pairs`.
+
 calibration_params : `dict` or `None`
   Dictionary of parameters to give to `calibrate_threshold` for the
   threshold calibration step done at the end of `fit`. If `None` is
@@ -280,7 +283,7 @@ class ITML_Supervised(_BaseITML, TransformerMixin):
   `num_labeled` was deprecated in version 0.5.0 and will
   be removed in 0.6.0.

-num_constraints: int, optional (default=None)
+num_constraints : int, optional (default=None)
   Number of constraints to generate. If None, default to `20 *
   num_classes**2`.

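A quick sketch of the pair-based `fit` documented above (not part of the commit; the four toy pairs and the explicit `bounds` are illustrative, and when `bounds` is omitted it defaults to the 5th/95th percentiles described above):

# Minimal sketch: +1 labels similar pairs, -1 dissimilar pairs.
import numpy as np
from metric_learn import ITML

pairs = np.array([[[1.2, 7.5], [1.3, 1.5]],
                  [[6.4, 2.6], [6.2, 9.7]],
                  [[1.3, 4.5], [3.2, 4.6]],
                  [[6.2, 5.5], [5.4, 5.4]]])
y = np.array([-1, -1, 1, 1])

itml = ITML()
itml.fit(pairs, y, bounds=[2., 5.])   # d(similar) < 2., d(dissimilar) > 5.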

metric_learn/lfda.py

Lines changed: 18 additions & 11 deletions
@@ -39,10 +39,16 @@ class LFDA(MahalanobisMixin, TransformerMixin):
   defaults to min(7, n_features - 1).

 embedding_type : str, optional (default: 'weighted')
-  Type of metric in the embedding space
-  'weighted' - weighted eigenvectors
-  'orthonormalized' - orthonormalized
-  'plain' - raw eigenvectors
+  Type of metric in the embedding space.
+
+  'weighted'
+    weighted eigenvectors
+
+  'orthonormalized'
+    orthonormalized
+
+  'plain'
+    raw eigenvectors

 preprocessor : array-like, shape=(n_samples, n_features) or callable
   The preprocessor to call to get tuples from indices. If array-like,
@@ -67,13 +73,14 @@

 References
 ------------------
-.. [1] `Dimensionality Reduction of Multimodal Labeled Data by Local Fisher
-  Discriminant Analysis <http://www.ms.k.u-tokyo.ac.jp/2007/LFDA.pdf>`_
-  Masashi Sugiyama.
-
-.. [2] `Local Fisher Discriminant Analysis on Beer Style Clustering
-  <https://gastrograph.com/resources/whitepapers/local-fisher\
-  -discriminant-analysis-on-beer-style-clustering.html#>`_ Yuan Tang.
+.. [1] Masashi Sugiyama. `Dimensionality Reduction of Multimodal Labeled
+  Data by Local Fisher Discriminant Analysis
+  <http://www.ms.k.u-tokyo.ac.jp/2007/LFDA.pdf>`_. JMLR 2007.
+
+.. [2] Yuan Tang. `Local Fisher Discriminant Analysis on Beer Style
+  Clustering
+  <https://gastrograph.com/resources/whitepapers/local-fisher\
+  -discriminant-analysis-on-beer-style-clustering.html#>`_.
 '''

 def __init__(self, n_components=None, num_dims='deprecated',
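A small usage sketch of the `embedding_type` options documented above (not part of the commit; the iris dataset and the parameter values are just for illustration):

# Minimal sketch: compare the three documented embedding types.
from sklearn.datasets import load_iris
from metric_learn import LFDA

X, y = load_iris(return_X_y=True)
for kind in ('weighted', 'orthonormalized', 'plain'):
    lfda = LFDA(k=2, n_components=2, embedding_type=kind)
    X_2d = lfda.fit_transform(X, y)   # (150, 2) embedding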

metric_learn/lmnn.py

Lines changed: 5 additions & 4 deletions
@@ -137,10 +137,11 @@ class LMNN(MahalanobisMixin, TransformerMixin):

 References
 ----------
-.. [1] `Distance Metric Learning for Large Margin Nearest Neighbor
-  Classification <http://papers.nips.cc/paper/2795-distance-metric\
-  -learning-for-large-margin-nearest-neighbor-classification>`_
-  Kilian Q. Weinberger, John Blitzer, Lawrence K. Saul
+.. [1] K. Q. Weinberger, J. Blitzer, L. K. Saul. `Distance Metric
+  Learning for Large Margin Nearest Neighbor Classification
+  <http://papers.nips.cc/paper/2795-distance-metric\
+  -learning-for-large-margin-nearest-neighbor-classification>`_. NIPS
+  2005.
 """

 def __init__(self, init=None, k=3, min_iter=50, max_iter=1000,

metric_learn/lsml.py

Lines changed: 1 addition & 1 deletion
@@ -208,7 +208,7 @@ class LSML(_BaseLSML, _QuadrupletsClassifierMixin):
   Squared Residual
   <http://www.cs.ucla.edu/~weiwang/paper/ICDM12.pdf>`_. ICDM 2012.

-.. [2] Adapted from https://gist.github.com/kcarnold/5439917
+.. [2] Code adapted from https://gist.github.com/kcarnold/5439917

 See Also
 --------

metric_learn/mlkr.py

Lines changed: 6 additions & 6 deletions
@@ -73,15 +73,15 @@ class MLKR(MahalanobisMixin, TransformerMixin):
   :meth:`fit` and n_features_a must be less than or equal to that.
   If ``n_components`` is not None, n_features_a must match it.

-A0: Not used.
+A0 : Not used.
   .. deprecated:: 0.5.0
     `A0` was deprecated in version 0.5.0 and will
     be removed in 0.6.0. Use 'init' instead.

-tol: float, optional (default=None)
+tol : float, optional (default=None)
   Convergence tolerance for the optimization.

-max_iter: int, optional (default=1000)
+max_iter : int, optional (default=1000)
   Cap on number of conjugate gradient iterations.

 verbose : bool, optional (default=False)
@@ -118,9 +118,9 @@ class MLKR(MahalanobisMixin, TransformerMixin):

 References
 ----------
-.. [1] `Information-theoretic Metric Learning
-  <http://machinelearning.wustl.edu/\
-  mlpapers/paper_files/icml2007_DavisKJSD07.pdf>`_ Jason V. Davis, et al.
+.. [1] K.Q. Weinberger and G. Tesauto. `Metric Learning for Kernel
+  Regression <http://proceedings.mlr.press/v2/weinberger07a\
+  /weinberger07a.pdf>`_. AISTATS 2007.
 """

 def __init__(self, n_components=None, num_dims='deprecated', init=None,
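For context on the `tol` and `max_iter` parameters documented above, a minimal MLKR sketch (not part of the commit; the random regression data is made up):

# Minimal sketch: MLKR learns a metric for kernel regression, so y is
# continuous rather than a class label.
import numpy as np
from metric_learn import MLKR

rng = np.random.RandomState(0)
X = rng.rand(100, 5)
y = X[:, 0] + 0.1 * rng.randn(100)

mlkr = MLKR(n_components=2, tol=1e-6, max_iter=1000)
X_transformed = mlkr.fit_transform(X, y)   # shape (100, 2)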

metric_learn/mmc.py

Lines changed: 5 additions & 12 deletions
@@ -383,10 +383,6 @@ class MMC(_BaseMMC, _PairsClassifierMixin):
   An SPD matrix of shape (n_features, n_features), that will
   be used as such to initialize the metric.

-preprocessor : array-like, shape=(n_samples, n_features) or callable
-  The preprocessor to call to get tuples from indices. If array-like,
-  tuples will be gotten like this: X[indices].
-
 A0 : Not used.
   .. deprecated:: 0.5.0
     `A0` was deprecated in version 0.5.0 and will
@@ -442,10 +438,11 @@

 References
 ----------
-.. [1] `Distance metric learning with application to clustering with
-  side-information <http://papers.nips.cc/paper/2164-distance-metric-\
-  learning-with-application-to-clustering-with-side-information.pdf>`_
-  Xing, Jordan, Russell, Ng.
+.. [1] Xing, Jordan, Russell, Ng. `Distance metric learning with application
+  to clustering with side-information
+  <http://papers.nips.cc/paper/2164-distance-metric-\
+  learning-with-application-to-clustering-with-side-information.pdf>`_.
+  NIPS 2002.

 See Also
 --------
@@ -538,10 +535,6 @@ class MMC_Supervised(_BaseMMC, TransformerMixin):
   A numpy array of shape (n_features, n_features), that will
   be used as such to initialize the metric.

-preprocessor : array-like, shape=(n_samples, n_features) or callable
-  The preprocessor to call to get tuples from indices. If array-like,
-  tuples will be gotten like this: X[indices].
-
 A0 : Not used.
   .. deprecated:: 0.5.0
     `A0` was deprecated in version 0.5.0 and will
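And a short sketch of `MMC_Supervised` with an explicit `init`, the parameter documented just above the removed duplicate `preprocessor` entries (not part of the commit; iris and the chosen values are illustrative):

# Minimal sketch: identity initialization, constraints generated from labels.
from sklearn.datasets import load_iris
from metric_learn import MMC_Supervised

X, y = load_iris(return_X_y=True)
mmc = MMC_Supervised(init='identity', num_constraints=200, random_state=42)
mmc.fit(X, y)
print(mmc.get_mahalanobis_matrix().shape)   # (4, 4) for iris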

metric_learn/nca.py

Lines changed: 1 addition & 1 deletion
@@ -123,7 +123,7 @@ class NCA(MahalanobisMixin, TransformerMixin):
 .. [1] J. Goldberger, G. Hinton, S. Roweis, R. Salakhutdinov. `Neighbourhood
   Components Analysis
   <http://www.cs.nyu.edu/~roweis/papers/ncanips.pdf>`_.
-  Advances in Neural Information Processing Systems. 17, 513-520, 2005.
+  NIPS 2005.

 .. [2] Wikipedia entry on `Neighborhood Components Analysis
   <https://en.wikipedia.org/wiki/Neighbourhood_components_analysis>`_

metric_learn/rca.py

Lines changed: 4 additions & 4 deletions
@@ -72,10 +72,10 @@ class RCA(MahalanobisMixin, TransformerMixin):

 References
 ------------------
-.. [1] `Adjustment learning and relevant component analysis
-  <http://citeseerx.ist.\
-  psu.edu/viewdoc/download?doi=10.1.1.19.2871&rep=rep1&type=pdf>`_ Noam
-  Shental, et al.
+.. [1] Noam Shental, et al. `Adjustment learning and relevant component
+  analysis <http://citeseerx.ist.\
+  psu.edu/viewdoc/download?doi=10.1.1.19.2871&rep=rep1&type=pdf>`_ .
+  ECCV 2002.


 Attributes

metric_learn/sdml.py

Lines changed: 5 additions & 7 deletions
@@ -211,14 +211,12 @@ class SDML(_BaseSDML, _PairsClassifierMixin):

 References
 ----------
+.. [1] Qi et al. `An efficient sparse metric learning in high-dimensional
+  space via L1-penalized log-determinant regularization
+  <http://www.machinelearning.org/archive/icml2009/papers/46.pdf>`_.
+  ICML 2009.

-.. [1] Qi et al.
-  An efficient sparse metric learning in high-dimensional space via
-  L1-penalized log-determinant regularization. ICML 2009.
-  http://lms.comp.nus.edu.sg/sites/default/files/publication\
-  -attachments/icml09-guojun.pdf
-
-.. [2] Adapted from https://gist.github.com/kcarnold/5439945
+.. [2] Code adapted from https://gist.github.com/kcarnold/5439945
 """

 def fit(self, pairs, y, calibration_params=None):
