in liblinear/liblinearutil.py [0:0]
def train(arg1, arg2=None, arg3=None):
	"""
	train(y, x [, options]) -> model | ACC

	y: a list/tuple/ndarray of l true labels (type must be int/double).

	x: 1. a list/tuple of l training instances. Feature vector of
	      each training instance is a list/tuple or dictionary.

	   2. an l * n numpy ndarray or scipy spmatrix (n: number of features).

	train(prob [, options]) -> model | ACC
	train(prob, param) -> model | ACC

	Train a model from data (y, x) or a problem prob using
	'options' or a parameter param.

	If '-v' is specified in 'options' (i.e., cross validation)
	either accuracy (ACC) or mean-squared error (MSE) is returned.

	options:
		-s type : set type of solver (default 1)
		  for multi-class classification
			 0 -- L2-regularized logistic regression (primal)
			 1 -- L2-regularized L2-loss support vector classification (dual)
			 2 -- L2-regularized L2-loss support vector classification (primal)
			 3 -- L2-regularized L1-loss support vector classification (dual)
			 4 -- support vector classification by Crammer and Singer
			 5 -- L1-regularized L2-loss support vector classification
			 6 -- L1-regularized logistic regression
			 7 -- L2-regularized logistic regression (dual)
		  for regression
			11 -- L2-regularized L2-loss support vector regression (primal)
			12 -- L2-regularized L2-loss support vector regression (dual)
			13 -- L2-regularized L1-loss support vector regression (dual)
		-c cost : set the parameter C (default 1)
		-p epsilon : set the epsilon in loss function of SVR (default 0.1)
		-e epsilon : set tolerance of termination criterion
			-s 0 and 2
				|f'(w)|_2 <= eps*min(pos,neg)/l*|f'(w0)|_2,
				where f is the primal function, (default 0.01)
			-s 11
				|f'(w)|_2 <= eps*|f'(w0)|_2 (default 0.0001)
			-s 1, 3, 4, and 7
				Dual maximal violation <= eps; similar to liblinear (default 0.)
			-s 5 and 6
				|f'(w)|_inf <= eps*min(pos,neg)/l*|f'(w0)|_inf,
				where f is the primal function (default 0.01)
			-s 12 and 13
				|f'(alpha)|_1 <= eps |f'(alpha0)|,
				where f is the dual function (default 0.1)
		-B bias : if bias >= 0, instance x becomes [x; bias]; if < 0, no bias term added (default -1)
		-wi weight: weights adjust the parameter C of different classes (see README for details)
		-v n: n-fold cross validation mode
		-C : find parameters (C for -s 0, 2 and C, p for -s 11)
		-n nr_thread : parallel version with [nr_thread] threads (default 1; only for -s 0, 1, 2, 3, 11)
		-q : quiet mode (no outputs)
	"""
	prob, param = None, None
	# Dispatch on the first argument: raw (y, x) data vs. a prebuilt problem.
	if isinstance(arg1, (list, tuple)) or (scipy and isinstance(arg1, scipy.ndarray)):
		# Validate x explicitly (an assert would be stripped under `python -O`,
		# silently passing bad types into problem()).
		if not (isinstance(arg2, (list, tuple)) or (scipy and isinstance(arg2, (scipy.ndarray, sparse.spmatrix)))):
			raise TypeError("Wrong types for the arguments")
		y, x, options = arg1, arg2, arg3
		prob = problem(y, x)
		param = parameter(options)
	elif isinstance(arg1, problem):
		prob = arg1
		if isinstance(arg2, parameter):
			param = arg2
		else:
			# arg2 is an option string (or None) to be parsed.
			param = parameter(arg2)
	if prob is None or param is None:
		raise TypeError("Wrong types for the arguments")

	# The bias setting lives in the parameter but must be applied to the problem.
	prob.set_bias(param.bias)
	liblinear.set_print_string_function(param.print_func)
	err_msg = liblinear.check_parameter(prob, param)
	if err_msg:
		raise ValueError('Error: %s' % err_msg)

	if param.flag_find_parameters:
		# -C mode: parameter search over C (and p for SVR) via the C library.
		nr_fold = param.nr_fold
		best_C = c_double()
		best_p = c_double()
		best_score = c_double()
		# -1.0 tells the C side to pick its own starting point.
		start_C = param.C if param.flag_C_specified else -1.0
		start_p = param.p if param.flag_p_specified else -1.0
		liblinear.find_parameters(prob, param, nr_fold, start_C, start_p, best_C, best_p, best_score)
		if param.solver_type in [L2R_LR, L2R_L2LOSS_SVC]:
			print("Best C = %g  CV accuracy = %g%%\n"% (best_C.value, 100.0*best_score.value))
		elif param.solver_type in [L2R_L2LOSS_SVR]:
			print("Best C = %g Best p = %g  CV MSE = %g\n"% (best_C.value, best_p.value, best_score.value))
		return best_C.value, best_p.value, best_score.value

	elif param.flag_cross_validation:
		# -v mode: k-fold cross validation; returns MSE for SVR solvers, ACC otherwise.
		l, nr_fold = prob.l, param.nr_fold
		target = (c_double * l)()
		liblinear.cross_validation(prob, param, nr_fold, target)
		ACC, MSE, SCC = evaluations(prob.y[:l], target[:l])
		if param.solver_type in [L2R_L2LOSS_SVR, L2R_L2LOSS_SVR_DUAL, L2R_L1LOSS_SVR_DUAL]:
			print("Cross Validation Mean squared error = %g" % MSE)
			print("Cross Validation Squared correlation coefficient = %g" % SCC)
			return MSE
		else:
			print("Cross Validation Accuracy = %g%%" % ACC)
			return ACC

	else:
		# Normal training: wrap the C model pointer in a Python-owned model.
		m = liblinear.train(prob, param)
		m = toPyModel(m)
		return m