metrics

package
v0.0.0-...-0705f78 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 30, 2018 License: MIT Imports: 3 Imported by: 0

Documentation

Index

Examples

Constants

This section is empty.

Variables

This section is empty.

Functions

func AccuracyScore

func AccuracyScore(Ytrue, Ypred mat.Matrix, normalize bool, sampleWeight *mat.Dense) float64

AccuracyScore returns the fraction (or, when normalize is false, the count) of samples where Ypred matches Ytrue, optionally weighted by sampleWeight.

Example
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// A nil *mat.Dense means "no sample weights" for AccuracyScore.
	var noWeights *mat.Dense

	// 4 samples, 1 output: two of four predictions match the truth.
	truth := mat.NewDense(4, 1, []float64{0, 1, 2, 3})
	preds := mat.NewDense(4, 1, []float64{0, 2, 1, 3})
	fmt.Println(AccuracyScore(truth, preds, true, noWeights))

	// Multi-output case: 2 samples with 2 outputs each.
	fmt.Println(AccuracyScore(
		mat.NewDense(2, 2, []float64{0, 1, 1, 1}),
		mat.NewDense(2, 2, []float64{1, 1, 1, 1}),
		true, noWeights))
}

// >>> y_true = [0, 1, 2, 0, 1, 2]
//     >>> y_pred = [0, 2, 1, 0, 0, 1]
//     >>> precision_score(y_true, y_pred, average='macro')  # doctest: +ELLIPSIS
//     0.22...

// ExamplePrecisionScore reproduces the scikit-learn precision_score doc
// example (macro-averaged precision of 0.22 on this 6-sample input).
// NOTE(review): renamed from "ExemplePrecisionScore" — go test only
// discovers and checks example output for functions whose name starts
// with the exact prefix "Example"; the misspelling left this example
// unverified.
func ExamplePrecisionScore() {
	Ytrue, Ypred := mat.NewDense(6, 1, []float64{0, 1, 2, 0, 1, 2}), mat.NewDense(6, 1, []float64{0, 2, 1, 0, 0, 1})
	fmt.Printf("%.2f", PrecisionScore(Ytrue, Ypred))
	// Output:
	// 0.22
}

// ExampleRecallScore reproduces the scikit-learn recall_score doc
// example (macro-averaged recall of 0.33 on this 6-sample input).
// NOTE(review): renamed from "ExempleRecallScore" — go test only
// discovers and checks example output for functions whose name starts
// with the exact prefix "Example"; the misspelling left this example
// unverified.
func ExampleRecallScore() {
	Ytrue, Ypred := mat.NewDense(6, 1, []float64{0, 1, 2, 0, 1, 2}), mat.NewDense(6, 1, []float64{0, 2, 1, 0, 0, 1})
	fmt.Printf("%.2f", RecallScore(Ytrue, Ypred))
	// Output:
	// 0.33
}

// ExampleF1Score reproduces the scikit-learn f1_score doc example
// (macro-averaged F1 of 0.26 on this 6-sample input).
// NOTE(review): renamed from "ExempleF1Score" — go test only discovers
// and checks example output for functions whose name starts with the
// exact prefix "Example"; the misspelling left this example unverified.
func ExampleF1Score() {
	Ytrue, Ypred := mat.NewDense(6, 1, []float64{0, 1, 2, 0, 1, 2}), mat.NewDense(6, 1, []float64{0, 2, 1, 0, 0, 1})
	fmt.Printf("%.2f", F1Score(Ytrue, Ypred))
	// Output:
	// 0.26
}

func ExempleFBetaScore() {
	Ytrue, Ypred := mat.NewDense(6, 1, []float64{0, 1, 2, 0, 1, 2}), mat.NewDense(6, 1, []float64{0, 2, 1, 0, 0, 1})
	fmt.Printf("%.2f", FBetaScore(Ytrue, Ypred, .5))
	
Output:

0.5
0.5

func F1Score

func F1Score(Ytrue, Ypred mat.Matrix) float64

F1Score computes the F1 score (harmonic mean of precision and recall). See https://en.wikipedia.org/wiki/F1_score

func FBetaScore

func FBetaScore(Ytrue, Ypred mat.Matrix, beta float64) float64

FBetaScore is the weighted harmonic mean of precision and recall,

reaching its optimal value at 1 and its worst value at 0.
The `beta` parameter determines the weight of precision in the combined
score. ``beta < 1`` lends more weight to precision, while ``beta > 1``
favors recall (``beta -> 0`` considers only precision, ``beta -> inf``
only recall)

func MeanAbsoluteError

func MeanAbsoluteError(yTrue, yPred mat.Matrix, sampleWeight *mat.Dense, multioutput string) *mat.Dense

MeanAbsoluteError regression loss Read more in the :ref:`User Guide <mean_absolute_error>`. Parameters ---------- y_true : array-like of shape = (n_samples) or (n_samples, n_outputs)

Ground truth (correct) target values.

y_pred : array-like of shape = (n_samples) or (n_samples, n_outputs)

Estimated target values.

sample_weight : array-like of shape = (n_samples), optional

Sample weights.

multioutput : string in ['raw_values', 'uniform_average']

or array-like of shape (n_outputs)
Defines aggregating of multiple output values.
Array-like value defines weights used to average errors.
'raw_values' :
    Returns a full set of errors in case of multioutput input.
'uniform_average' :
    Errors of all outputs are averaged with uniform weight.

Returns ------- loss : float or ndarray of floats

If multioutput is 'raw_values', then mean absolute error is returned
for each output separately.
If multioutput is 'uniform_average' or an ndarray of weights, then the
weighted average of all output errors is returned.
MAE output is non-negative floating point. The best value is 0.0.

Examples -------- >>> from sklearn.metrics import mean_absolute_error >>> y_true = [3, -0.5, 2, 7] >>> y_pred = [2.5, 0.0, 2, 8] >>> mean_absolute_error(y_true, y_pred) 0.5 >>> y_true = [[0.5, 1], [-1, 1], [7, -6]] >>> y_pred = [[0, 2], [-1, 2], [8, -5]] >>> mean_absolute_error(y_true, y_pred) 0.75 >>> mean_absolute_error(y_true, y_pred, multioutput='raw_values') array([ 0.5, 1. ]) >>> mean_absolute_error(y_true, y_pred, multioutput=[0.3, 0.7]) ... # doctest: +ELLIPSIS 0.849...

func MeanSquaredError

func MeanSquaredError(yTrue, yPred mat.Matrix, sampleWeight *mat.Dense, multioutput string) *mat.Dense

MeanSquaredError regression loss Read more in the :ref:`User Guide <mean_squared_error>`. Parameters ---------- y_true : array-like of shape = (n_samples) or (n_samples, n_outputs)

Ground truth (correct) target values.

y_pred : array-like of shape = (n_samples) or (n_samples, n_outputs)

Estimated target values.

sample_weight : array-like of shape = (n_samples), optional

Sample weights.

multioutput : string in ['raw_values', 'uniform_average']

or array-like of shape (n_outputs)
Defines aggregating of multiple output values.
Array-like value defines weights used to average errors.
'raw_values' :
    Returns a full set of errors in case of multioutput input.
'uniform_average' :
    Errors of all outputs are averaged with uniform weight.

Returns ------- loss : float or ndarray of floats

A non-negative floating point value (the best value is 0.0), or an
array of floating point values, one for each individual target.

func PrecisionScore

func PrecisionScore(Ytrue, Ypred mat.Matrix) float64

PrecisionScore computes the precision (positive predictive value). See https://en.wikipedia.org/wiki/F1_score

func R2Score

func R2Score(yTrue, yPred *mat.Dense, sampleWeight *mat.Dense, multioutput string) *mat.Dense

R2Score """R^2 (coefficient of determination) regression score function. Best possible score is 1.0 and it can be negative (because the model can be arbitrarily worse). A constant model that always predicts the expected value of y, disregarding the input features, would get a R^2 score of 0.0. Read more in the :ref:`User Guide <r2Score>`. Parameters ---------- yTrue : array-like of shape = (nSamples) or (nSamples, nOutputs)

Ground truth (correct) target values.

yPred : array-like of shape = (nSamples) or (nSamples, nOutputs)

Estimated target values.

sampleWeight : array-like of shape = (nSamples), optional

Sample weights.

multioutput : string in ['rawValues', 'uniformAverage', \ 'varianceWeighted'] or None or array-like of shape (nOutputs)

Defines aggregating of multiple output scores.
Array-like value defines weights used to average scores.
Default is "uniformAverage".
'rawValues' :
    Returns a full set of scores in case of multioutput input.
'uniformAverage' :
    Scores of all outputs are averaged with uniform weight.
'varianceWeighted' :
    Scores of all outputs are averaged, weighted by the variances
    of each individual output.
.. versionchanged:: 0.19
    Default value of multioutput is 'uniformAverage'.

Returns ------- z : float or ndarray of floats

The R^2 score or ndarray of scores if 'multioutput' is
'rawValues'.

Notes ----- This is not a symmetric function. Unlike most other scores, R^2 score may be negative (it need not actually be the square of a quantity R). References ---------- .. [1] `Wikipedia entry on the Coefficient of determination

<https://en.wikipedia.org/wiki/Coefficient_of_determination>`_

Examples -------- >>> from sklearn.metrics import r2Score >>> yTrue = [3, -0.5, 2, 7] >>> yPred = [2.5, 0.0, 2, 8] >>> r2Score(yTrue, yPred) # doctest: +ELLIPSIS 0.948... >>> yTrue = [[0.5, 1], [-1, 1], [7, -6]] >>> yPred = [[0, 2], [-1, 2], [8, -5]] >>> r2Score(yTrue, yPred, multioutput='varianceWeighted') ... # doctest: +ELLIPSIS 0.938... >>> yTrue = [1,2,3] >>> yPred = [1,2,3] >>> r2Score(yTrue, yPred) 1.0 >>> yTrue = [1,2,3] >>> yPred = [2,2,2] >>> r2Score(yTrue, yPred) 0.0 >>> yTrue = [1,2,3] >>> yPred = [3,2,1] >>> r2Score(yTrue, yPred) -3.0 """

func RecallScore

func RecallScore(Ytrue, Ypred mat.Matrix) float64

RecallScore computes the recall (sensitivity, true positive rate). See https://en.wikipedia.org/wiki/F1_score

Types

This section is empty.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL