---
title: Metrics
keywords: fastai
sidebar: home_sidebar
summary: "Metrics."
description: "Metrics."
nb_path: "nbs/evaluation/evaluation.metrics.ipynb"
---
{% raw %}

NDCG[source]

NDCG(true, pred)
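Normalized Discounted Cumulative Gain. The implementation is not reproduced on this page, but for the single-relevant-item case used in the examples below it behaves consistently with the following sketch (the helper name `ndcg_single` and its list-based arguments are illustrative assumptions, not this module's API):

```python
import math

def ndcg_single(true_item, ranked_items):
    # Sketch only: NDCG when each user has exactly one relevant item.
    # The ideal DCG is then 1, so NDCG reduces to 1 / log2(position + 2)
    # when the item appears in the ranked list, and 0 otherwise.
    if true_item in ranked_items:
        position = ranked_items.index(true_item)  # 0-based rank
        return 1.0 / math.log2(position + 2)
    return 0.0
```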

{% endraw %} {% raw %}

APAK[source]

APAK(true, pred)
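Average Precision At K. With a single relevant item per user, AP@k collapses to the reciprocal of the item's 1-based rank within the top-k list. A minimal sketch under that assumption (`apak_single` is an illustrative name, not this module's API):

```python
def apak_single(true_item, ranked_items):
    # Sketch only: average precision with a single relevant item.
    # Precision is credited only at the position where the item is found,
    # so AP equals 1 / (rank + 1) for a hit and 0 for a miss.
    if true_item in ranked_items:
        return 1.0 / (ranked_items.index(true_item) + 1)
    return 0.0
```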

{% endraw %} {% raw %}

HR[source]

HR(true, pred)
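Hit Rate: the fraction of users whose ground-truth item appears anywhere in the recommended list. A per-user sketch under the same single-item assumption (`hit` is an illustrative name, not this module's API):

```python
def hit(true_item, ranked_items):
    # Sketch only: 1.0 if the relevant item made the list, 0.0 otherwise.
    return 1.0 if true_item in ranked_items else 0.0
```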

{% endraw %} {% raw %}

get_eval_metrics[source]

get_eval_metrics(scores, true, k=10)
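Judging by the examples below, `scores` is a list with one dict per user mapping item ids to predicted scores, `true` holds each user's single ground-truth item, and the function ranks the `k` highest-scoring items per user before averaging the metrics, returning a tuple consistent with `(NDCG, APAK, HR)`. A sketch of that flow, reusing the illustrative helpers above (an assumption-based reference, not the module's code):

```python
def eval_metrics_sketch(scores, true, k=10):
    # Sketch only: rank each user's items by score, keep the top k,
    # then average the three per-user metrics sketched above.
    ndcgs, apaks, hits = [], [], []
    for user_scores, true_row in zip(scores, true):
        ranked = sorted(user_scores, key=user_scores.get, reverse=True)[:k]
        item = int(true_row[0])
        ndcgs.append(ndcg_single(item, ranked))
        apaks.append(apak_single(item, ranked))
        hits.append(hit(item, ranked))
    n = len(scores)
    return sum(ndcgs) / n, sum(apaks) / n, sum(hits) / n
```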

{% endraw %} {% raw %}
import torch

# Five users share the same predicted scores; item 4 scores highest,
# so the top-3 ranking for every user is [4, 3, 2].
scores = [{1: 0.2, 2: 0.3, 3: 0.4, 4: 0.5, 9: 0.1},
          {1: 0.2, 2: 0.3, 3: 0.4, 4: 0.5, 9: 0.1},
          {1: 0.2, 2: 0.3, 3: 0.4, 4: 0.5, 9: 0.1},
          {1: 0.2, 2: 0.3, 3: 0.4, 4: 0.5, 9: 0.1},
          {1: 0.2, 2: 0.3, 3: 0.4, 4: 0.5, 9: 0.1}]

# One ground-truth item per user; items 2, 3 and 4 land inside the top-3.
true = torch.tensor([[1],[1],[2],[3],[4]])
metric = get_eval_metrics(scores, true, k=3)
metric
(0.4261859357357025, 0.36666667461395264, 0.6000000238418579)
{% endraw %} {% raw %}
# Every user's ground-truth item is 4, which is always ranked first.
true = torch.tensor([[4],[4],[4],[4],[4]])
metric = get_eval_metrics(scores, true, k=3)
metric
(1.0, 1.0, 1.0)
{% endraw %} {% raw %}
# The ground-truth items (9 and 1) have the lowest scores and never reach the top-3.
true = torch.tensor([[9],[1],[9],[1],[1]])
metric = get_eval_metrics(scores, true, k=3)
metric
(0.0, 0.0, 0.0)
{% endraw %}