@@ -211,11 +211,10 @@ fitXGBoostModel <- function(labels.train, data.train, nrounds = 50,
211211 data.train.cv.test <- data.train [- mySample ,]
212212 labels.train.cv.test <- labels.train [- mySample ]
213213
214- cv.fit <- xgboost(data = data.train.cv ,
215- label = labels.train.cv , nthread = 1 , nrounds = nrounds ,
214+ cv.fit <- xgboost(x = data.train.cv ,
215+ y = labels.train.cv , nthread = 1 , nrounds = nrounds ,
216216 objective = " binary:logistic" ,
217- eval_metric = ' error' ,
218- verbose = 0 )
217+ eval_metric = ' error' )
219218 predictions <- predict(cv.fit , data.train.cv.test )
220219 message(' prediction accuracy (CV) (higher for splice ' ,
221220 ' donor than splice acceptor)' )
@@ -229,11 +228,10 @@ fitXGBoostModel <- function(labels.train, data.train, nrounds = 50,
229228 message(" AUC: " , evaluatePerformance(labels.train.cv.test == 1 ,predictions )$ AUC )
230229 }
231230
232- cv.fit <- xgboost(data = data.train ,
233- label = labels.train , nthread = 1 , nrounds = nrounds ,
231+ cv.fit <- xgboost(x = data.train ,
232+ y = labels.train , nthread = 1 , nrounds = nrounds ,
234233 objective = " binary:logistic" ,
235- eval_metric = ' error' ,
236- verbose = 0 )
234+ eval_metric = ' error' )
237235
238236 return (cv.fit )
239237}