Skip to content

Commit

Permalink
docs: fix example formatting (#144)
Browse files Browse the repository at this point in the history
  • Loading branch information
Borda authored Mar 29, 2021
1 parent fb587af commit 53d5701
Show file tree
Hide file tree
Showing 15 changed files with 22 additions and 43 deletions.
5 changes: 2 additions & 3 deletions torchmetrics/classification/auroc.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,16 +78,15 @@ class AUROC(Metric):
ValueError:
If the mode of data (binary, multi-label, multi-class) changes between batches.
Example:
>>> # binary case
Example (binary case):
>>> from torchmetrics import AUROC
>>> preds = torch.tensor([0.13, 0.26, 0.08, 0.19, 0.34])
>>> target = torch.tensor([0, 0, 1, 1, 1])
>>> auroc = AUROC(pos_label=1)
>>> auroc(preds, target)
tensor(0.5000)
>>> # multiclass case
Example (multiclass case):
>>> preds = torch.tensor([[0.90, 0.05, 0.05],
... [0.05, 0.90, 0.05],
... [0.05, 0.05, 0.90],
Expand Down
5 changes: 2 additions & 3 deletions torchmetrics/classification/average_precision.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,16 +52,15 @@ class AveragePrecision(Metric):
process_group:
Specify the process group on which synchronization is called. default: None (which selects the entire world)
Example:
>>> # binary case
Example (binary case):
>>> from torchmetrics import AveragePrecision
>>> pred = torch.tensor([0, 1, 2, 3])
>>> target = torch.tensor([0, 1, 1, 1])
>>> average_precision = AveragePrecision(pos_label=1)
>>> average_precision(pred, target)
tensor(1.)
>>> # multiclass case
Example (multiclass case):
>>> pred = torch.tensor([[0.75, 0.05, 0.05, 0.05, 0.05],
... [0.05, 0.75, 0.05, 0.05, 0.05],
... [0.05, 0.05, 0.75, 0.05, 0.05],
Expand Down
9 changes: 3 additions & 6 deletions torchmetrics/classification/hinge.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,7 @@ class Hinge(Metric):
If ``multiclass_mode`` is not: None, ``MulticlassMode.CRAMMER_SINGER``, ``"crammer-singer"``,
``MulticlassMode.ONE_VS_ALL`` or ``"one-vs-all"``.
Example:
# binary example
Example (binary case):
>>> import torch
>>> from torchmetrics import Hinge
>>> target = torch.tensor([0, 1, 1])
Expand All @@ -70,16 +69,14 @@ class Hinge(Metric):
>>> hinge(preds, target)
tensor(0.3000)
# multiclass example, default mode
Example (multiclass case, default mode):
>>> target = torch.tensor([0, 1, 2])
>>> preds = torch.tensor([[-1.0, 0.9, 0.2], [0.5, -1.1, 0.8], [2.2, -0.5, 0.3]])
>>> hinge = Hinge()
>>> hinge(preds, target)
tensor(2.9000)
# multiclass example, one vs all mode
Example (multiclass case, one-vs-all mode):
>>> target = torch.tensor([0, 1, 2])
>>> preds = torch.tensor([[-1.0, 0.9, 0.2], [0.5, -1.1, 0.8], [2.2, -0.5, 0.3]])
>>> hinge = Hinge(multiclass_mode="one-vs-all")
Expand Down
1 change: 0 additions & 1 deletion torchmetrics/classification/matthews_corrcoef.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,6 @@ class MatthewsCorrcoef(Metric):
will be used to perform the allgather
Example:
>>> from torchmetrics import MatthewsCorrcoef
>>> target = torch.tensor([1, 1, 0, 0])
>>> preds = torch.tensor([0, 1, 0, 0])
Expand Down
5 changes: 2 additions & 3 deletions torchmetrics/classification/precision_recall_curve.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,7 @@ class PrecisionRecallCurve(Metric):
process_group:
Specify the process group on which synchronization is called. default: None (which selects the entire world)
Example:
>>> # binary case
Example (binary case):
>>> from torchmetrics import PrecisionRecallCurve
>>> pred = torch.tensor([0, 1, 2, 3])
>>> target = torch.tensor([0, 1, 1, 0])
Expand All @@ -66,7 +65,7 @@ class PrecisionRecallCurve(Metric):
>>> thresholds
tensor([1, 2, 3])
>>> # multiclass case
Example (multiclass case):
>>> pred = torch.tensor([[0.75, 0.05, 0.05, 0.05, 0.05],
... [0.05, 0.75, 0.05, 0.05, 0.05],
... [0.05, 0.05, 0.75, 0.05, 0.05],
Expand Down
5 changes: 0 additions & 5 deletions torchmetrics/classification/roc.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@ class ROC(Metric):
will be used to perform the allgather
Example (binary case):
>>> from torchmetrics import ROC
>>> pred = torch.tensor([0, 1, 2, 3])
>>> target = torch.tensor([0, 1, 1, 1])
Expand All @@ -67,8 +66,6 @@ class ROC(Metric):
tensor([4, 3, 2, 1, 0])
Example (multiclass case):
>>> from torchmetrics import ROC
>>> pred = torch.tensor([[0.75, 0.05, 0.05, 0.05],
... [0.05, 0.75, 0.05, 0.05],
... [0.05, 0.05, 0.75, 0.05],
Expand All @@ -87,8 +84,6 @@ class ROC(Metric):
tensor([1.7500, 0.7500, 0.0500])]
Example (multilabel case):
>>> from torchmetrics import ROC
>>> pred = torch.tensor([[0.8191, 0.3680, 0.1138],
... [0.3584, 0.7576, 0.1183],
... [0.2286, 0.3468, 0.1338],
Expand Down
5 changes: 2 additions & 3 deletions torchmetrics/collections.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,7 @@ class MetricCollection(nn.ModuleDict):
ValueError:
If ``metrics`` is not a ``list``, ``tuple`` or a ``dict``.
Example:
>>> # input as list
Example (input as list):
>>> import torch
>>> from pprint import pprint
>>> from torchmetrics import MetricCollection, Accuracy, Precision, Recall
Expand All @@ -59,7 +58,7 @@ class MetricCollection(nn.ModuleDict):
>>> metrics(preds, target)
{'Accuracy': tensor(0.1250), 'Precision': tensor(0.0667), 'Recall': tensor(0.1111)}
>>> # input as dict
Example (input as dict):
>>> metrics = MetricCollection({'micro_recall': Recall(num_classes=3, average='micro'),
... 'macro_recall': Recall(num_classes=3, average='macro')})
>>> same_metric = metrics.clone()
Expand Down
5 changes: 2 additions & 3 deletions torchmetrics/functional/classification/auroc.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,15 +177,14 @@ def auroc(
ValueError:
If ``average`` is none of ``None``, ``"macro"`` or ``"weighted"``.
Example:
>>> # binary case
Example (binary case):
>>> from torchmetrics.functional import auroc
>>> preds = torch.tensor([0.13, 0.26, 0.08, 0.19, 0.34])
>>> target = torch.tensor([0, 0, 1, 1, 1])
>>> auroc(preds, target, pos_label=1)
tensor(0.5000)
>>> # multiclass case
Example (multiclass case):
>>> preds = torch.tensor([[0.90, 0.05, 0.05],
... [0.05, 0.90, 0.05],
... [0.05, 0.05, 0.90],
Expand Down
5 changes: 2 additions & 3 deletions torchmetrics/functional/classification/average_precision.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,15 +77,14 @@ def average_precision(
tensor with average precision. If multiclass will return list
of such tensors, one for each class
Example:
>>> # binary case
Example (binary case):
>>> from torchmetrics.functional import average_precision
>>> pred = torch.tensor([0, 1, 2, 3])
>>> target = torch.tensor([0, 1, 1, 1])
>>> average_precision(pred, target, pos_label=1)
tensor(1.)
>>> # multiclass case
Example (multiclass case):
>>> pred = torch.tensor([[0.75, 0.05, 0.05, 0.05, 0.05],
... [0.05, 0.75, 0.05, 0.05, 0.05],
... [0.05, 0.05, 0.75, 0.05, 0.05],
Expand Down
9 changes: 3 additions & 6 deletions torchmetrics/functional/classification/hinge.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,24 +154,21 @@ def hinge(
If ``multiclass_mode`` is not: None, ``MulticlassMode.CRAMMER_SINGER``, ``"crammer-singer"``,
``MulticlassMode.ONE_VS_ALL`` or ``"one-vs-all"``.
Example:
# binary example
Example (binary case):
>>> import torch
>>> from torchmetrics.functional import hinge
>>> target = torch.tensor([0, 1, 1])
>>> preds = torch.tensor([-2.2, 2.4, 0.1])
>>> hinge(preds, target)
tensor(0.3000)
# multiclass example, default mode
Example (multiclass case, default mode):
>>> target = torch.tensor([0, 1, 2])
>>> preds = torch.tensor([[-1.0, 0.9, 0.2], [0.5, -1.1, 0.8], [2.2, -0.5, 0.3]])
>>> hinge(preds, target)
tensor(2.9000)
# multiclass example, one vs all mode
Example (multiclass case, one-vs-all mode):
>>> target = torch.tensor([0, 1, 2])
>>> preds = torch.tensor([[-1.0, 0.9, 0.2], [0.5, -1.1, 0.8], [2.2, -0.5, 0.3]])
>>> hinge(preds, target, multiclass_mode="one-vs-all")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,8 +203,7 @@ def precision_recall_curve(
If the number of classes deduced from ``preds`` is not the same as the
``num_classes`` provided.
Example:
>>> # binary case
Example (binary case):
>>> from torchmetrics.functional import precision_recall_curve
>>> pred = torch.tensor([0, 1, 2, 3])
>>> target = torch.tensor([0, 1, 1, 0])
Expand All @@ -216,7 +215,7 @@ def precision_recall_curve(
>>> thresholds
tensor([1, 2, 3])
>>> # multiclass case
Example (multiclass case):
>>> pred = torch.tensor([[0.75, 0.05, 0.05, 0.05, 0.05],
... [0.05, 0.75, 0.05, 0.05, 0.05],
... [0.05, 0.05, 0.75, 0.05, 0.05],
Expand Down
3 changes: 0 additions & 3 deletions torchmetrics/functional/classification/roc.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,6 @@ def roc(
If multiclass or multilabel, this is a list of such tensors, one for each class/label.
Example (binary case):
>>> from torchmetrics.functional import roc
>>> pred = torch.tensor([0, 1, 2, 3])
>>> target = torch.tensor([0, 1, 1, 1])
Expand All @@ -134,7 +133,6 @@ def roc(
tensor([4, 3, 2, 1, 0])
Example (multiclass case):
>>> from torchmetrics.functional import roc
>>> pred = torch.tensor([[0.75, 0.05, 0.05, 0.05],
... [0.05, 0.75, 0.05, 0.05],
Expand All @@ -153,7 +151,6 @@ def roc(
tensor([1.7500, 0.7500, 0.0500])]
Example (multilabel case):
>>> from torchmetrics.functional import roc
>>> pred = torch.tensor([[0.8191, 0.3680, 0.1138],
... [0.3584, 0.7576, 0.1183],
Expand Down
1 change: 1 addition & 0 deletions torchmetrics/functional/retrieval/average_precision.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ def retrieval_average_precision(preds: Tensor, target: Tensor) -> Tensor:
a single-value tensor with the average precision (AP) of the predictions ``preds`` w.r.t. the labels ``target``.
Example:
>>> from torchmetrics.functional import retrieval_average_precision
>>> preds = tensor([0.2, 0.3, 0.5])
>>> target = tensor([True, False, True])
>>> retrieval_average_precision(preds, target)
Expand Down
1 change: 1 addition & 0 deletions torchmetrics/functional/retrieval/reciprocal_rank.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ def retrieval_reciprocal_rank(preds: Tensor, target: Tensor) -> Tensor:
a single-value tensor with the reciprocal rank (RR) of the predictions ``preds`` w.r.t. the labels ``target``.
Example:
>>> from torchmetrics.functional import retrieval_reciprocal_rank
>>> preds = torch.tensor([0.2, 0.3, 0.5])
>>> target = torch.tensor([False, True, False])
>>> retrieval_reciprocal_rank(preds, target)
Expand Down
1 change: 0 additions & 1 deletion torchmetrics/utilities/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,6 @@ def get_group_indexes(idx: Tensor) -> List[Tensor]:
A list of integer `torch.Tensor`s
Example:
>>> indexes = torch.tensor([0, 0, 0, 1, 1, 1, 1])
>>> groups = get_group_indexes(indexes)
>>> groups
Expand Down

0 comments on commit 53d5701

Please sign in to comment.