Java Code Examples for ml.dmlc.xgboost4j.java.Booster#saveModel()
The following examples show how to use ml.dmlc.xgboost4j.java.Booster#saveModel(). You can go to the original project or source file by following the links above each example.
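Before the project-specific examples, here is a minimal, self-contained sketch of the call being documented. The file names ("train.libsvm", "model.bin"), parameter values, and the SaveModelSketch class are illustrative assumptions, not taken from the examples below; XGBoost.train, Booster#saveModel(String), XGBoost.loadModel(String), and Booster#predict are standard xgboost4j API.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import ml.dmlc.xgboost4j.java.Booster;
import ml.dmlc.xgboost4j.java.DMatrix;
import ml.dmlc.xgboost4j.java.XGBoost;
import ml.dmlc.xgboost4j.java.XGBoostError;

public class SaveModelSketch {
    public static void main(String[] args) throws XGBoostError, IOException {
        // "train.libsvm" is a placeholder path to a LIBSVM-format training file
        DMatrix train = new DMatrix("train.libsvm");

        Map<String, Object> params = new HashMap<>();
        params.put("objective", "binary:logistic");
        params.put("max_depth", 6);
        params.put("eta", 0.3);

        Map<String, DMatrix> watches = new HashMap<>();
        watches.put("train", train);

        // train a small booster and write it to disk with the path-based overload
        Booster booster = XGBoost.train(train, params, 10, watches, null, null);
        booster.saveModel("model.bin");

        // the saved file can be loaded back into an equivalent Booster and used for prediction
        Booster restored = XGBoost.loadModel("model.bin");
        float[][] preds = restored.predict(train);
        System.out.println("rows predicted: " + preds.length);
    }
}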
Example 1
Source File: DependencyGraghEdgeCostTrain.java From SmoothNLP with GNU General Public License v3.0
public static void trainXgbModel(String trainFile, String devFile, String modelAddr,
                                 int nround, int negSampleRate, int earlyStop, int nthreads) throws IOException {
    final DMatrix trainMatrix = readCoNLL2DMatrix(trainFile, negSampleRate);
    final DMatrix devMatrix = readCoNLL2DMatrix(devFile, negSampleRate);
    try {
        Map<String, Object> params = new HashMap<String, Object>() {
            {
                put("nthread", nthreads);
                put("max_depth", 16);
                put("silent", 0);
                put("objective", "binary:logistic");
                put("colsample_bytree", 0.95);
                put("colsample_bylevel", 0.95);
                put("eta", 0.2);
                put("subsample", 0.95);
                put("lambda", 0.2);
                put("min_child_weight", 5);
                put("scale_pos_weight", negSampleRate);
                // other parameters
                // "objective" -> "multi:softmax", "num_class" -> "6"
                put("eval_metric", "logloss");
                put("tree_method", "approx");
            }
        };
        Map<String, DMatrix> watches = new HashMap<String, DMatrix>() {
            {
                put("train", trainMatrix);
                put("dev", devMatrix);
            }
        };
        // train with early stopping on the dev set, then serialize the booster to the target address
        Booster booster = XGBoost.train(trainMatrix, params, nround, watches, null, null, null, earlyStop);
        OutputStream outstream = SmoothNLP.IOAdaptor.create(modelAddr);
        booster.saveModel(outstream);
    } catch (XGBoostError e) {
        System.out.println(e);
    }
}
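Example 1 writes the booster to an OutputStream obtained from SmoothNLP.IOAdaptor. As a hedged sketch of the reverse direction, assuming the model address resolves to an ordinary local file (a real SmoothNLP caller would presumably open the stream through the same IOAdaptor), the saved bytes can be read back with the stream-based XGBoost.loadModel overload:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import ml.dmlc.xgboost4j.java.Booster;
import ml.dmlc.xgboost4j.java.XGBoost;
import ml.dmlc.xgboost4j.java.XGBoostError;

public class LoadFromStreamSketch {
    // "modelAddr" is assumed to point at a local file previously written by booster.saveModel(OutputStream)
    public static Booster reload(String modelAddr) throws IOException, XGBoostError {
        try (InputStream in = new FileInputStream(modelAddr)) {
            // loadModel(InputStream) reconstructs a Booster equivalent to the one that was saved
            return XGBoost.loadModel(in);
        }
    }
}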
Example 2
Source File: DependencyGraphRelationshipTagTrain.java From SmoothNLP with GNU General Public License v3.0
public static void trainXgbModel(String trainFile, String devFile, String modelAddr,
                                 int nround, int earlyStop, int nthreads) throws IOException {
    final DMatrix trainMatrix = readCoNLL2DMatrix(trainFile);
    final DMatrix devMatrix = readCoNLL2DMatrix(devFile);
    try {
        Map<String, Object> params = new HashMap<String, Object>() {
            {
                put("nthread", nthreads);
                put("max_depth", 12);
                put("silent", 0);
                put("objective", "multi:softprob");
                put("colsample_bytree", 0.90);
                put("colsample_bylevel", 0.90);
                put("eta", 0.2);
                put("subsample", 0.95);
                put("lambda", 1.0);
                // tree parameters for regularization
                put("min_child_weight", 5);
                put("max_leaves", 128);
                // other parameters
                // "objective" -> "multi:softmax", "num_class" -> "6"
                put("eval_metric", "merror");
                put("tree_method", "approx");
                put("num_class", tag2float.size());
            }
        };
        Map<String, DMatrix> watches = new HashMap<String, DMatrix>() {
            {
                put("train", trainMatrix);
                put("dev", devMatrix);
            }
        };
        // train a multi-class booster with early stopping, then serialize it to the target address
        Booster booster = XGBoost.train(trainMatrix, params, nround, watches, null, null, null, earlyStop);
        OutputStream outstream = SmoothNLP.IOAdaptor.create(modelAddr);
        booster.saveModel(outstream);
    } catch (XGBoostError e) {
        System.out.println(e);
    }
}
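Both examples obtain their OutputStream from SmoothNLP.IOAdaptor and leave its lifecycle to that helper. For a plain local destination, a minimal sketch (the FileOutputStream target and the save helper below are assumptions, not part of the SmoothNLP code) would manage the stream explicitly, so it is closed even if saveModel throws:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import ml.dmlc.xgboost4j.java.Booster;
import ml.dmlc.xgboost4j.java.XGBoostError;

public class SaveToStreamSketch {
    // "booster" is an already-trained model and "modelAddr" a local destination path; both are placeholders
    public static void save(Booster booster, String modelAddr) throws IOException, XGBoostError {
        // try-with-resources closes the stream regardless of whether saveModel succeeds
        try (OutputStream out = new FileOutputStream(modelAddr)) {
            booster.saveModel(out);
        }
    }
}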