// Perform cross validation. The instances in the problem are separated // in the given number of folds. Each fold is sequentially evaluated // using the model trained with the remaining folds. The slice that is // returned contains the predicted instance classes. func CrossValidation(problem *Problem, param Parameters, nFolds uint) ([]float64, error) { cParam := toCParameter(param) defer func() { C.destroy_param_wrap(cParam) C.free(unsafe.Pointer(cParam)) }() r := C.check_parameter_wrap(problem.problem, cParam) if r != nil { msg := C.GoString(r) return nil, errors.New(msg) } nInstances := uint(problem.problem.l) target := newDouble(C.size_t(nInstances)) defer C.free(unsafe.Pointer(target)) C.cross_validation_wrap(problem.problem, cParam, C.int(nFolds), target) classifications := make([]float64, nInstances) for idx, _ := range classifications { classifications[idx] = float64(C.get_double_idx(target, C.int(idx))) } return classifications, nil }
// Bias extracts the bias of a two-class problem. func (model *Model) Bias() float64 { if model.model.nr_class != 2 { panic(fmt.Sprint("not exactly two classes: ", model.model.nr_class)) } // model.nr_feature does not include bias. n := model.model.nr_feature return float64(C.get_double_idx(model.model.w, n)) }
// Weights extracts the weight vector of a two-class problem. func (model *Model) Weights() []float64 { if model.model.nr_class != 2 { panic(fmt.Sprint("not exactly two classes: ", model.model.nr_class)) } n := model.model.nr_feature weights := make([]float64, n) for i := range weights { weights[i] = float64(C.get_double_idx(model.model.w, C.int(i))) } return weights }
// Iterate over the training instances in a problem. func (problem *Problem) Iterate(fun ProblemIterFunc) { for i := 0; i < int(problem.problem.l); i++ { label := float64(C.get_double_idx(problem.problem.y, C.int(i))) cNodes := C.nodes_vector_get(problem.problem, C.size_t(i)) fVals := make(FeatureVector, 0) var j C.size_t for j = 0; C.nodes_get(cNodes, j).index != -1; j++ { cNode := C.nodes_get(cNodes, j) fVals = append(fVals, FeatureValue{int(cNode.index), float64(cNode.value)}) } if !fun(&TrainingInstance{label, fVals}) { break } } }
// Predict the label of an instance, given a model with probability
// information. This method returns the label of the predicted class
// and a map from class label to class probability. Probability
// estimates are currently given for logistic regression only. If
// another solver is used, the probability of each class is zero.
// The error result is always nil in this implementation.
func (model *Model) PredictProbability(nodes []FeatureValue) (float64, map[int]float64, error) {
	// Allocate sparse C feature vector.
	cn := cNodes(nodes)
	defer C.nodes_free(cn)

	// Allocate C array for probabilities, one slot per class.
	cProbs := newProbs(model.model)
	defer C.free(unsafe.Pointer(cProbs))

	r := C.predict_probability_wrap(model.model, cn, cProbs)

	// Copy the probability estimates into a map keyed by class label.
	labels := model.labels()
	probs := make(map[int]float64)
	for idx, label := range labels {
		probs[label] = float64(C.get_double_idx(cProbs, C.int(idx)))
	}

	return float64(r), probs, nil
}
// Predict the label of an instance, given a model with probability
// information. This method returns the label of the predicted class,
// a map from class label to class probability, and an error if the
// model was trained without the information required to do
// probability estimates.
func (model *Model) PredictProbability(nodes []FeatureValue) (float64, map[int]float64, error) {
	// Reject models that cannot produce probability estimates.
	if C.svm_check_probability_model_wrap(model.model) == 0 {
		return 0, nil, errors.New("Model was not trained to do probability estimates")
	}

	// Allocate sparse C feature vector.
	cn := cNodes(nodes)
	defer C.nodes_free(cn)

	// Allocate C array for probabilities, one slot per class.
	cProbs := C.probs_new(model.model)
	defer C.free(unsafe.Pointer(cProbs))

	r := C.svm_predict_probability_wrap(model.model, cn, cProbs)

	// Copy the probability estimates into a map keyed by class label.
	labels := model.labels()
	probs := make(map[int]float64)
	for idx, label := range labels {
		probs[label] = float64(C.get_double_idx(cProbs, C.int(idx)))
	}

	return float64(r), probs, nil
}