我无法在不同的分类模型上获得重新采样的结果
在bwplot中我总是有这个错误
报错信息为:Error in models %in% ... : object 'models' not found(找不到对象 'models')
当我训练模型时,一切都很好
重新采样仅返回:
调用(Call):resamples.default(x = lst_model)
模型:ADA,RDF,MAR,SVM,NNET
重新采样数量:10
性能指标(Performance metrics):此处为空——说明各模型之间没有共同的评估指标
时间估计(Time estimates for):everything(全部)、final model fit(最终模型拟合)
这是我的代码
# Model: logistic regression (GLM) ----
set.seed(7777)
cctrlg <- trainControl(
  method = "cv", number = 10, returnResamp = "final",
  classProbs = TRUE, summaryFunction = twoClassSummary
)
model_glm_cv <- train(
  var.imp.formula,
  data = data.train,
  method = "glm",
  family = binomial,
  preProc = c("center", "scale"),
  trControl = cctrlg
)
# Model: MARS (earth) ----
set.seed(7777)
# FIX: use the same summaryFunction (twoClassSummary) as the other models.
# resamples() only keeps metrics shared by EVERY model; mixing the default
# Accuracy/Kappa with ROC/Sens/Spec leaves no common metric, which is why
# summary()/bwplot() on the resamples object fail downstream.
cctrle <- trainControl(
  method = "cv", number = 10, returnResamp = "final",
  classProbs = TRUE, summaryFunction = twoClassSummary
)
marsGrid <- expand.grid(nprune = c(2, 4, 6, 8, 10), degree = c(1, 2, 3))
model_mars3_cv <- train(
  var.imp.formula,
  data = data.train,
  method = "earth",
  metric = "ROC",                 # optimize the twoClassSummary metric
  tuneGrid = marsGrid,
  trControl = cctrle,
  preProc = c("center", "scale")  # FIX: original line was truncated here
)
# Model: random forest ----
set.seed(7777)
# FIX: caret expects `seeds` to be a list of length B + 1, where B is the
# number of RESAMPLES (10 CV folds here), not nrow(data.train) + 1.
# Elements 1..B need one seed per tuning-parameter combination
# (tuneLength = 5 -> up to 5 mtry values, so 1:20 is ample); the final
# element is a single seed used for the last model fit.
n_resamples <- 10
seeds <- vector(mode = "list", length = n_resamples + 1)
for (i in seq_len(n_resamples)) seeds[[i]] <- 1:20
seeds[[n_resamples + 1]] <- 7777
cctrl1 <- trainControl(
  method = "cv", number = 10, returnResamp = "final",
  classProbs = TRUE,
  summaryFunction = twoClassSummary,
  seeds = seeds
)
model_rf_cv <- train(
  var.imp.formula,
  data = data.train,
  method = "rf",
  metric = "ROC",
  ntree = 400,
  tuneLength = 5,
  trControl = cctrl1,
  preProc = c("center", "scale")  # FIX: original line was truncated here
)
# Model: AdaBoost.M1 ----
set.seed(7777)
gridAda <- expand.grid(
  mfinal = (1:3) * 3,
  maxdepth = c(1, 3),
  coeflearn = c("Breiman", "Freund", "Zhu")
)
# Reuses `seeds` defined above; each resample element must hold at least
# one seed per row of gridAda (18 rows here).
cctrlada <- trainControl(
  method = "cv", number = 10, returnResamp = "final", classProbs = TRUE,
  summaryFunction = twoClassSummary, seeds = seeds
)
# NOTE: tuneLength is ignored when an explicit tuneGrid is supplied, so it
# has been dropped.
model_ada_cv <- train(
  var.imp.formula,
  data = data.train,
  method = "AdaBoost.M1",
  metric = "ROC",
  tuneGrid = gridAda,
  trControl = cctrlada,
  preProc = c("center", "scale")  # FIX: original line was truncated here
)
# Model: xgboost with DART booster ----
set.seed(7777)
xgbGrid <- expand.grid(
  nrounds = c(1, 10),
  max_depth = 2,
  eta = 0.30,
  rate_drop = 0.10,
  skip_drop = 0.10,
  colsample_bytree = 0.90,
  min_child_weight = 2,
  subsample = 0.75,
  gamma = 0.10
)
cctrlx <- trainControl(
  method = "cv", number = 10, returnResamp = "final",
  classProbs = TRUE, summaryFunction = twoClassSummary
)
# NOTE: tuneLength is ignored when an explicit tuneGrid is supplied, so it
# has been dropped.
model_xgb_cv <- train(
  var.imp.formula,
  data = data.train,
  method = "xgbDART",
  metric = "ROC",
  tuneGrid = xgbGrid,
  trControl = cctrlx,
  verbose = FALSE,                # FIX: spell out FALSE, never use F
  preProc = c("center", "scale")  # FIX: original line was truncated here
)
# Model: single-hidden-layer neural network (nnet) ----
set.seed(7777)
# FIX: classProbs = TRUE (never T, which is reassignable), and add the same
# summaryFunction as the other models -- without it this model reports only
# Accuracy/Kappa, resamples() then has no metric common to all models, and
# bwplot() on the comparison fails.
cctrlnnet <- trainControl(
  method = "cv", number = 10, returnResamp = "final",
  classProbs = TRUE, summaryFunction = twoClassSummary
)
model_nnet_cv <- train(
  var.imp.formula,
  data = data.train,
  method = "nnet",
  metric = "ROC",
  trControl = cctrlnnet,
  preProc = c("center", "scale"),
  trace = FALSE
)
# Model: SVM with radial basis kernel ----
set.seed(7777)
# FIX: TRUE instead of T, and the same summaryFunction as the other models
# so resamples() finds a shared metric across all of them.
cctrlsv <- trainControl(
  method = "cv", number = 10, returnResamp = "final",
  classProbs = TRUE, summaryFunction = twoClassSummary
)
# FIX: modern caret tuneGrid columns are named without the leading dot
# (C, sigma). tuneLength is ignored when tuneGrid is supplied, so it was
# dropped.
model_svmRadial_cv <- train(
  var.imp.formula,
  data = data.train,
  method = "svmRadial",
  metric = "ROC",
  tuneGrid = data.frame(C = c(0.25, 0.5, 1), sigma = 0.05),
  trControl = cctrlsv,
  preProc = c("center", "scale")
)
# Collect the fitted models and compare their resampling results ----
# NOTE(review): resamples() keeps only metrics common to EVERY model, and
# bwplot() errors with "object 'models' not found" when that intersection
# is empty -- make sure every trainControl used the same summaryFunction.
lst_model <- list(
  ADA = model_ada_cv,
  RDF = model_rf_cv,
  MAR = model_mars3_cv,
  SVM = model_svmRadial_cv,
  NNET = model_nnet_cv
  # XGB = model_xgb_cv, RGL = model_glm_cv
)
lst_model
models_compare <- caret::resamples(lst_model)
sum_model <- summary(models_compare)
sum_model
# Side-by-side box-and-whisker plots of the resampled metrics, with free
# axis scales per panel.
scales <- list(x = list(relation = "free"), y = list(relation = "free"))
bwplot(models_compare, scales = scales)