As part of learning machine learning, I am writing some examples, not to show that one method is better than another, but to show how to use the various packages and which parameters to tune. I started from this blog comparing BooST and xgboost, and then successfully added gbm to the example. Now I am trying to add glmnet, but the returned model always has both coefficients (near) zero. Either I am doing something wrong, or glmnet is simply not the right algorithm for this data, and I am trying to figure out which. Here is my reproducible example:
# Uncomment the following 2 lines if you need to install BooST (requires devtools)
#library(devtools)
#install_github("gabrielrvsc/BooST")
library(BooST)
library(xgboost)
library(gbm)
library(glmnet)
library(ggplot2)  # needed for the plots below
# Data generating process: y = cos(pi*(x1 + x2)) + noise, with binary x2
# and noise variance chosen so the population R^2 equals r2
dgp = function(N, r2){
  X = matrix(rnorm(N*2, 0, 1), N, 2)
  X[,ncol(X)] = base::sample(c(0,1), N, replace = TRUE)  # make x2 a 0/1 dummy
  yaux = cos(pi*(rowSums(X)))         # noiseless signal
  vyaux = var(yaux)
  ve = vyaux*(1-r2)/r2                # noise variance implied by target r2
  e = rnorm(N, 0, sqrt(ve))
  y = yaux + e
  return(list(y = y, X = X))
}
# True (noiseless) function, evaluated on a grid for plotting
x1r = rep(seq(-4,4,length.out = 1000), 2)
x2r = c(rep(0,1000), rep(1,1000))
yr = cos(pi*(x1r+x2r))
real_function = data.frame(x1 = x1r, x2 = as.factor(x2r), y = yr)
# Train data (noisy)
set.seed(1)
data = dgp(N = 1000, r2 = 0.5)
y = data$y
x = data$X
# Test data (noisy)
set.seed(2)
dataout = dgp(N = 1000, r2 = 0.5)
yout = dataout$y
xout = dataout$X
# Set seed and train all 4 models
set.seed(1)
BooST_Model = BooST(x, y, v = 0.18, M = 300 , display = TRUE)
xgboost_Model = xgboost(x, label = y, nrounds = 300, params = list(eta = 0.14, max_depth = 2))
gbm_Model = gbm.fit(x, y, distribution = "gaussian", n.trees = 10000, shrinkage = .001, interaction.depth=5)
glmnet_Model = cv.glmnet(x, y, family = "gaussian", alpha=0)
coef(glmnet_Model)
3 x 1 sparse Matrix of class "dgCMatrix"
                        1
(Intercept)  7.807215e-02
V1          -3.033534e-45
V2          -4.466134e-45
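(Side note, not part of the original output: coef() on a cv.glmnet object defaults to s = "lambda.1se", the most heavily penalized lambda within one standard error of the CV optimum. Pulling the less-penalized lambda.min coefficients rules out over-regularization as the cause; here they come out essentially flat as well.)
# coef.cv.glmnet defaults to s = "lambda.1se"; compare the lambda.min fit
coef(glmnet_Model, s = "lambda.min")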
# Predict from test data
p_BooST = predict(BooST_Model, xout)
p_xgboost = predict(xgboost_Model, xout)
p_gbm = predict(gbm_Model, xout, n.trees=10000)
p_glmnet = predict(glmnet_Model, xout)
# Show RMSE
sqrt(mean((p_BooST - yout)^2))
sqrt(mean((p_xgboost - yout)^2))
sqrt(mean((p_gbm - yout)^2))
sqrt(mean((p_glmnet - yout)^2))
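As a yardstick for the four numbers above (a reference check of my own, not part of the original comparison): with r2 = 0.5, a model that recovers the true function should score close to the injected noise's standard deviation, while a constant predictor should score close to sd(yout).
# Oracle RMSE (knows the true function) vs. a constant-mean baseline
sqrt(mean((cos(pi*rowSums(xout)) - yout)^2))  # roughly the noise sd
sqrt(mean((mean(y) - yout)^2))  # roughly sd(yout); a near-intercept-only glmnet sits here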
fitted = data.frame(x1 = x[,1], x2 = as.factor(x[,2]),
BooST = fitted(BooST_Model),
xgboost = predict(xgboost_Model, x),
gbm = predict(object = gbm_Model, newdata = x, n.trees = 10000),
glmnet = predict(glmnet_Model, newx = x, s=glmnet_Model$lambda.min)[, 1], y = y)
# Plot noisy Y
ggplot() + geom_point(data = fitted, aes(x = x1, y = y, color = x2)) + geom_line(data = real_function, aes(x = x1, y = y, linetype = x2))
# Plot xgboost
ggplot() + geom_point(data = fitted, aes(x = x1, y = y), color = "gray") + geom_point(data = fitted, aes(x = x1, y = xgboost, color = x2)) + geom_line(data = real_function, aes(x = x1, y = y, linetype = x2))
# Plot BooST
ggplot() + geom_point(data = fitted, aes(x = x1, y = y), color = "gray") + geom_point(data = fitted, aes(x = x1, y = BooST, color = x2)) + geom_line(data = real_function, aes(x = x1, y = y, linetype = x2))
# Plot gbm
ggplot() + geom_point(data = fitted, aes(x = x1, y = y), color = "gray") + geom_point(data = fitted, aes(x = x1, y = gbm, color = x2)) + geom_line(data = real_function, aes(x = x1, y = y, linetype = x2))
# Plot glmnet
ggplot() + geom_point(data = fitted, aes(x = x1, y = y), color = "gray") + geom_point(data = fitted, aes(x = x1, y = glmnet, color = x2)) + geom_line(data = real_function, aes(x = x1, y = y, linetype = x2))
Answer 0 (score: 0)
"Am I doing something wrong"

You are not, at least not programming-wise.

"or is glmnet simply not the right algorithm for this data"

It is not that glmnet is "not right" (although it is meant primarily for problems with many predictors, not just a couple of them). It is that your comparison is fundamentally "unfair" and inappropriate: all 3 of the other algorithms you use are ensembles. Your gbm, for example, consists of 10,000 (ten thousand!) individual decision trees. Trying to compare this with a single regressor, like glmnet, is like comparing apples to oranges.
Nevertheless, this should be a good exercise and a reminder that, although all these tools look "equivalent" from a programming perspective ("well, I just load each of them with library(), right? So why shouldn't they be equivalent and comparable?"), that appearance hides a great deal. Which is why at least an elementary familiarity with statistical learning is always a good idea (I would strongly suggest the freely available Introduction to Statistical Learning, aimed at beginners and including R code snippets).

The ensemble method of boosting in particular (the unifying element of the other 3 algorithms you use here) is no joke! It was a real game changer when it came out, roughly a decade before the deep learning era, and, in its xgboost implementation, it is still the winning choice in most Kaggle competitions involving "traditional" structured data (i.e., no text or images).
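To make the apples-to-oranges point concrete, here is a minimal sketch (my own illustration, reusing x, y, xout, yout, and p_gbm from the question): one boosting round with shrinkage = 1 is effectively a single regression tree, which is the fair "single learner" counterpart of glmnet, and its test RMSE should fall well short of the 10,000-tree ensemble's.
# A single tree from the same gbm package as a one-learner baseline
gbm_single = gbm.fit(x, y, distribution = "gaussian", n.trees = 1,
                     shrinkage = 1, interaction.depth = 5)
p_single = predict(gbm_single, xout, n.trees = 1)
sqrt(mean((p_single - yout)^2))  # single tree
sqrt(mean((p_gbm - yout)^2))     # 10,000-tree ensemble, from the question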
Answer 1 (score: 0)
Keep in mind that glmnet fits a linear model, meaning the response can be written as a linear combination of the predictors:

y = b0 + b1*x1 + b2*x2 + ...

In your datasets, you define the response as

yaux = cos(pi*(rowSums(X)))
yr = cos(pi*(x1r+x2r))

and in both cases this is clearly not a linear combination of the predictors.
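That said, glmnet can handle this data if you supply the nonlinearity yourself. A minimal sketch (the engineered matrices x_basis and xout_basis are my own names, not from the question): because x2 is binary, cos(pi*(x1+x2)) = cos(pi*x1) - 2*cos(pi*x1)*x2, so the response is exactly linear in two hand-built features, and a ridge fit on them should approach the noise-level RMSE.
# The true signal is linear in these two engineered columns
x_basis    = cbind(c1 = cos(pi*x[,1]),    c1x2 = cos(pi*x[,1])*x[,2])
xout_basis = cbind(c1 = cos(pi*xout[,1]), c1x2 = cos(pi*xout[,1])*xout[,2])
glmnet_basis = cv.glmnet(x_basis, y, family = "gaussian", alpha = 0)
p_basis = predict(glmnet_basis, newx = xout_basis, s = "lambda.min")
sqrt(mean((p_basis - yout)^2))   # should now be on par with the ensembles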