Example 1
// Wrapper for the `train` function in liblinear.
//
// `model* train(const struct problem *prob, const struct parameter *param);`
//
// The parameters are as follows:
//
// solverType:
//
//   for multi-class classification
//          0 -- L2-regularized logistic regression (primal)
//          1 -- L2-regularized L2-loss support vector classification (dual)
//          2 -- L2-regularized L2-loss support vector classification (primal)
//          3 -- L2-regularized L1-loss support vector classification (dual)
//          4 -- support vector classification by Crammer and Singer
//          5 -- L1-regularized L2-loss support vector classification
//          6 -- L1-regularized logistic regression
//          7 -- L2-regularized logistic regression (dual)
//   for regression
//         11 -- L2-regularized L2-loss support vector regression (primal)
//         12 -- L2-regularized L2-loss support vector regression (dual)
//         13 -- L2-regularized L1-loss support vector regression (dual)
//
// eps is the stopping criterion.
//
// C_ is the cost of constraint violation.
//
// p is the sensitivity of the loss function for support vector regression
// (the epsilon in the epsilon-insensitive loss).
//
// classWeights is a map from int to float64, where the key is the class label
// and the value is its weight. For example, {1: 10, -1: 0.5} assigns weight=10
// to class=1 and weight=0.5 to class=-1.
//
// If you do not want to change the penalty for any class, set classWeights
// to nil.
func Train(X, y *mat64.Dense, bias float64, pm *Parameter) *Model {

	var problem C.struct_problem

	nRows, nCols := X.Dims()

	// Copy the label column and the feature matrix into the C structures
	// liblinear's train() expects: l rows, n feature dimensions.
	cY := mapCDouble(y.Col(nil, 0))
	cX := toFeatureNodes(X)
	problem.x = &cX[0]
	problem.y = &cY[0]
	problem.n = C.int(nCols)
	problem.l = C.int(nRows)
	problem.bias = C.double(bias)

	model := C.train(&problem, pm.GetPtr())
	return &Model{
		cModel: model,
	}
}
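A minimal usage sketch for the wrapper above, assumed to live in the same package as Train. mat64 is gonum's github.com/gonum/matrix/mat64; the helper name trainToyModel and the idea that pm is built elsewhere are illustrative assumptions, not part of the wrapped package.

import "github.com/gonum/matrix/mat64"

// trainToyModel (hypothetical helper) trains on four rows with two features
// each. Labels live in a single-column matrix, and bias is set to 1.0 so
// liblinear appends a constant feature to every row. Building pm (solver
// type, eps, C_, p, classWeights) is not shown here.
func trainToyModel(pm *Parameter) *Model {
	X := mat64.NewDense(4, 2, []float64{
		0, 1,
		1, 1,
		1, 0,
		0, 0,
	})
	y := mat64.NewDense(4, 1, []float64{1, 1, -1, -1})
	return Train(X, y, 1.0, pm)
}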
Example 2
// Train wraps liblinear's C train() call; prob and param already hold the
// populated C problem and parameter structs.
func Train(prob *Problem, param *Parameter) *Model {
	return &Model{C.train(&prob.c_prob, &param.c_param)}
}
Example 3
func Train(prob *Problem, param *Parameter) *Model {
	libLinearHookPrintFunc() // Sets up logging
	return &Model{C.train(&prob.c_prob, &param.c_param)}
}
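Examples 2 and 3 suggest that Problem, Parameter, and Model are thin wrappers around liblinear's C structs. Below is a sketch of what such definitions typically look like, under that assumption; only the c_prob and c_param field names appear in the examples, so the c_model field and the exact layout are guesses.

/*
#include <linear.h>
*/
import "C"

// Problem and Parameter hold the C structs that C.train consumes; Model keeps
// the pointer C.train returns so it can be handed back to other liblinear
// calls later.
type Problem struct {
	c_prob C.struct_problem
}

type Parameter struct {
	c_param C.struct_parameter
}

type Model struct {
	c_model *C.struct_model
}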