具有1L罚分和10倍交叉验证的逻辑回归

时间:2016-12-07 21:11:37

标签: r logistic-regression lasso

我试图在训练集上应用带有L1惩罚的逻辑回归。我需要使用10倍交叉验证来找到惩罚参数的最佳值。任何人都可以告诉我为什么我使用x1-x3获得与x1-x14模型相同的结果?

数据:

GenerateData <- function( seed )
{
  set.seed( seed )

  # generate X
  x1 = runif(1500, -2, 2)
  x2 = rnorm(1500, mean = 4, sd = sqrt(2))
  x3 = rnorm(1500, mean = 0, sd = 1)
  x4 = runif(1500, -2, 2)
  x5 = runif(1500, -2, 2)
  x6 = runif(1500, -2, 2)
  x7 = runif(1500, -2, 2)
  x8 = runif(1500, -2, 2)
  x9 = runif(1500, -2, 2)
  x10 = runif(1500, -2, 2)
  x11 = runif(1500, -2, 2)
  x12 = runif(1500, -2, 2)
  x13 = runif(1500, -2, 2)
  x14 = runif(1500, -2, 2)

  # generate Y
  eta = -1.5 + (1.5*x1) + (0.85*x1^2) - (0.20*x1^3) + (2.5*I(x2<0)) + I(x2>3) + (0.3*x3)
  pi = exp(eta) / ( 1 + exp(eta) )
  y = rbinom(1500, 1, pi)

  # combine data and split into training and test data
  full = data.frame(y, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
  names(full) = c( "y" ,  paste( "x" , 1:14 , sep="" ) )

  return(full)
}

source( "GenerateData.R" )
FullData = GenerateData( 1035932 )
train = FullData[ 1:750 , ]
test = FullData[ 751:1500 , ]

对于x1-x3

x = model.matrix( y ~ .-1 + x1 + x2 + x3, data=FullData )
y = FullData$y
nFullData = nrow(FullData)
###

###
nFolds = 10
set.seed( 500 )
CVfolds = sample( rep( 1:nFolds , length=nFullData ) , nFullData ) 
cv_errors_ridge = matrix( -9999 , 1 , nFolds )
cv_errors_lasso = matrix( -9999 , 1 , nFolds )

nFolds_inner = 10
for( fold in 1:nFolds )
{
  xtrain = x[ CVfolds!=fold , ]
  ytrain = y[ CVfolds!=fold ]
  xtest = x[ CVfolds==fold , ]
  ytest = y[ CVfolds==fold ]

  cv_ridge = cv.glmnet( xtrain , ytrain , alpha=0 , nfolds = nFolds_inner ) 
  cv_ridge_train = glmnet( xtrain , ytrain , alpha=0 , lambda=cv_ridge$lambda.min )
  predict_ridge_train = predict( cv_ridge_train , xtest )
  cv_errors_ridge[ fold ] = sqrt( apply( ( ytest - predict_ridge_train )^2 , 2 , mean ) )

  cv_lasso = cv.glmnet( xtrain , ytrain , alpha=1 , nfolds = nFolds_inner ) 
  cv_lasso_train = glmnet( xtrain , ytrain , alpha=1 , lambda=cv_lasso$lambda.min )
  predict_lasso_train = predict( cv_lasso_train , xtest )
  cv_errors_lasso[ fold ] = sqrt( apply( ( ytest - predict_lasso_train )^2 , 2 , mean ) )    
}

mean( cv_errors_ridge )
mean( cv_errors_lasso )

coef( cv.glmnet( x , y , alpha=1 , nfolds = nFolds ) )

对于x1-x14

x = model.matrix( y ~ .-1 + x1 + x2 + x3 + x4 + x5 +x6 + x7 + x8 + x9 + x10 + x11
                  + x12 + x13 + x14, data=FullData )
y = FullData$y

###
nFolds = 10
set.seed( 500 )
CVfolds = sample( rep( 1:nFolds , length=nFullData ) , nFullData ) 
cv_errors_ridge = matrix( -9999 , 1 , nFolds )
cv_errors_lasso = matrix( -9999 , 1 , nFolds )

nFolds_inner = 10
for( fold in 1:nFolds )
{
  xtrain = x[ CVfolds!=fold , ]
  ytrain = y[ CVfolds!=fold ]
  xtest = x[ CVfolds==fold , ]
  ytest = y[ CVfolds==fold ]

  cv_ridge = cv.glmnet( xtrain , ytrain , alpha=0 , nfolds = nFolds_inner ) 
  cv_ridge_train = glmnet( xtrain , ytrain , alpha=0 , lambda=cv_ridge$lambda.min )
  predict_ridge_train = predict( cv_ridge_train , xtest )
  cv_errors_ridge[ fold ] = sqrt( apply( ( ytest - predict_ridge_train )^2 , 2 , mean ) )

  cv_lasso = cv.glmnet( xtrain , ytrain , alpha=1 , nfolds = nFolds_inner ) 
  cv_lasso_train = glmnet( xtrain , ytrain , alpha=1 , lambda=cv_lasso$lambda.min )
  predict_lasso_train = predict( cv_lasso_train , xtest )
  cv_errors_lasso[ fold ] = sqrt( apply( ( ytest - predict_lasso_train )^2 , 2 , mean ) )    
}

mean( cv_errors_ridge )
mean( cv_errors_lasso )

现在尝试另一种方式,但最终仍然获得x1-x3和x1-x14的相同值:

x <- model.matrix(y ~ .-1 + x1 + x2 + x3, data=FullData)
y <- as.double(as.matrix(FullData[, 1]))

# Fitting the model (Lasso: Alpha = 1)
set.seed(999)
cv.lasso <- cv.glmnet(x, y, family='binomial', alpha=1, parallel=TRUE, standardize=TRUE, type.measure='auc')

# Results
plot(cv.lasso)
plot(cv.lasso$glmnet.fit, xvar="lambda", label=TRUE)
cv.lasso$lambda.min
cv.lasso$lambda.1se
coef(cv.lasso, s=cv.lasso$lambda.min)


############################################################

x <- model.matrix(y ~ .-1 + x1 + x2 + x3 +x4 + x5 + x6 + x7
                  + x8 +x9 + x10 + x11 + x12 + x13 + x14, data=FullData)
y <- as.double(as.matrix(FullData[, 1]))

# Fitting the model (Lasso: Alpha = 1)
set.seed(999)
cv.lasso <- cv.glmnet(x, y, family='binomial', alpha=1, parallel=TRUE, standardize=TRUE, type.measure='auc')

# Results
plot(cv.lasso)
plot(cv.lasso$glmnet.fit, xvar="lambda", label=TRUE)
cv.lasso$lambda.min
cv.lasso$lambda.1se
coef(cv.lasso, s=cv.lasso$lambda.min)

0 个答案:

没有答案