How to do logistic regression with L-BFGS in R?

Date: 2015-05-11 08:41:31

Tags: r algorithm optimization machine-learning logistic-regression

I am trying to run a logistic regression with L-BFGS in R. Here is my dataset (390 observations of 14 variables; Y is the target variable):

GEST    DILATE    EFFACE    CONSIS    CONTR    MEMBRAN    AGE    STRAT    GRAVID    PARIT    DIAB    TRANSF    GEMEL    Y
31           3       100         3        1         2     26         3         1        0       2         2       1     1
28           8         0         3        1         2     25         3         1        0       2         1       2     1
31           3       100         3        2         2     28         3         2        0       2         1       1     1
...

The dataset comes from the "Données : prematures.xls" link at http://tutoriels-data-mining.blogspot.fr/2008/04/rgression-logistique-binaire.html. Y is a column I created in Excel from the "PREMATURE" column, with Y = IF(PREMATURE = "positif"; 1; 0).

I tried the optimx package, following https://stats.stackexchange.com/questions/17436/logistic-regression-with-lbfgs-solver. Here is the code:

install.packages("optimx")
library(optimx)

vY = as.matrix(premature['Y'])
mX = as.matrix(premature[c('GEST','DILATE','EFFACE','CONSIS','CONTR','MEMBRAN','AGE','STRAT','GRAVID','PARIT','DIAB','TRANSF','GEMEL')])

#add an intercept to the predictor variables
mX = cbind(rep(1, nrow(mX)), mX)

#the number of variables and observations
iK = ncol(mX)
iN = nrow(mX)

#define the logistic transformation
logit = function(mX, vBeta) {
  return(exp(mX %*% vBeta) / (1 + exp(mX %*% vBeta)))
}

# stable parametrisation of the log-likelihood function
logLikelihoodLogitStable = function(vBeta, mX, vY) {
  return(-sum(
    vY*(mX %*% vBeta - log(1+exp(mX %*% vBeta)))
    + (1-vY)*(-log(1 + exp(mX %*% vBeta)))
  )  # sum
  )  # return 
}

# score function
likelihoodScore = function(vBeta, mX, vY) {
  return(t(mX) %*% (logit(mX, vBeta) - vY))
}

# initial set of parameters (arbitrary starting parameters)
vBeta0 = c(10, -0.1, -0.3, 0.001, 0.01, 0.01, 0.001, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01)

optimLogitLBFGS = optimx(vBeta0, logLikelihoodLogitStable,
                         method = 'L-BFGS-B', gr = likelihoodScore,
                         mX = mX, vY = vY, hessian = TRUE)
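
For reference, the quantity the code minimises is the negative log-likelihood, and the gr argument supplies its gradient (the score). In the notation of the code (X = mX, y = vY, β = vBeta):

-\ell(\beta) = -\sum_{i=1}^{n} \left[ y_i \left( x_i^\top \beta - \log\!\left(1 + e^{x_i^\top \beta}\right) \right) - (1 - y_i) \log\!\left(1 + e^{x_i^\top \beta}\right) \right]
             = \sum_{i=1}^{n} \left[ \log\!\left(1 + e^{x_i^\top \beta}\right) - y_i \, x_i^\top \beta \right]

\nabla_\beta \, (-\ell)(\beta) = X^\top \left( \sigma(X\beta) - y \right), \qquad \sigma(z) = \frac{e^z}{1 + e^z}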

Here is the error:

Error in optimx.check(par, optcfg$ufn, optcfg$ugr, optcfg$uhess, lower,  : Cannot evaluate function at initial parameters
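
This error generally means optimx could not get a finite value out of the objective at vBeta0: optimx.check evaluates the function once at the initial parameters and aborts if it returns NA, NaN or ±Inf. A minimal diagnostic sketch, assuming the definitions above are already in the workspace:

# Evaluate the objective once by hand at the starting values. If this
# prints NA/NaN/Inf (for example because vY was read in as character
# text such as "positif" rather than as 0/1 numbers), optimx fails
# with exactly the message above.
logLikelihoodLogitStable(vBeta0, mX, vY)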

1 Answer:

Answer 0 (score: 0)

With your data, I get:

optimLogitLBFGS
#                p1         p2       p3         p4         p5         p6
# L-BFGS-B 9.720242 -0.1652943 0.525449 0.01681583 0.02781123 -0.3921004
#                 p7          p8         p9       p10        p11        p12
# L-BFGS-B -1.694412 -0.03461208 0.02759248 0.1993573 -0.6718275 0.02537887
#                 p13      p14   value fevals gevals niter convcode  kkt1  kkt2
# L-BFGS-B -0.8374338 0.625044 187.581    121    121    NA        1 FALSE FALSE
#          xtimes
# L-BFGS-B  0.044
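
One thing to note in this printout (my observation, not part of the original answer): convcode = 1, which for the optim-family methods means the iteration limit was hit rather than a converged solution being found. A possible tweak is to allow more iterations via optimx's itnmax argument:

# Hypothetical tweak: raise the iteration cap and rerun; convcode = 0
# in the result would indicate successful convergence.
optimLogitLBFGS = optimx(vBeta0, logLikelihoodLogitStable,
                         method = 'L-BFGS-B', gr = likelihoodScore,
                         mX = mX, vY = vY, hessian = TRUE,
                         itnmax = 1000)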

Here is the code used, lightly modified to create vY directly instead of in Excel:

library(readxl)  # used to read xls file
library(optimx)

premature <- read_excel("prematures.xls")

vY = as.matrix(premature['PREMATURE'])
# Recoding the response variable
vY = ifelse(vY == "positif", 1, 0)

mX = as.matrix(premature[c('GEST', 'DILATE', 'EFFACE', 'CONSIS', 'CONTR', 
                           'MEMBRAN', 'AGE', 'STRAT', 'GRAVID', 'PARIT', 
                           'DIAB', 'TRANSF', 'GEMEL')])

#add an intercept to the predictor variables
mX = cbind(rep(1, nrow(mX)), mX)

#the number of variables and observations
iK = ncol(mX)
iN = nrow(mX)

#define the logistic transformation
logit = function(mX, vBeta) {
  return(exp(mX %*% vBeta)/(1+ exp(mX %*% vBeta)) )
}

# stable parametrisation of the log-likelihood function
logLikelihoodLogitStable = function(vBeta, mX, vY) {
  return(-sum(
    vY*(mX %*% vBeta - log(1+exp(mX %*% vBeta)))
    + (1-vY)*(-log(1 + exp(mX %*% vBeta)))
  )  # sum
  )  # return 
}

# score function
likelihoodScore = function(vBeta, mX, vY) {
  return(t(mX) %*% (logit(mX, vBeta) - vY) )
}

# initial set of parameters (arbitrary starting parameters)
vBeta0 = c(10, -0.1, -0.3, 0.001, 0.01, 0.01, 0.001, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01)

optimLogitLBFGS = optimx(vBeta0, logLikelihoodLogitStable,
                         method = 'L-BFGS-B', gr = likelihoodScore, 
                         mX = mX, vY = vY, hessian=TRUE)

# Warning in optimx.check(par, optcfg$ufn, optcfg$ugr, optcfg$uhess, lower,  :
#  Parameters or bounds appear to have different scalings.
#  This can cause poor performance in optimization. 
#  It is important for derivative free methods like BOBYQA, UOBYQA, NEWUOA.
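
The warning itself is only about scaling: the columns of mX range from 0/1-style codes to EFFACE values near 100, plus the constant intercept column. A hypothetical way to address it, not in the original answer, is to standardise the predictors (keeping the intercept column as 1) before optimising; note that the coefficients are then on the standardised scale:

# Sketch: z-score each predictor, keep the intercept column as a
# constant 1, and restart from zeros.
mX_std = cbind(1, scale(mX[, -1]))
vBeta0_std = rep(0, ncol(mX_std))
optimLogitLBFGS_std = optimx(vBeta0_std, logLikelihoodLogitStable,
                             method = 'L-BFGS-B', gr = likelihoodScore,
                             mX = mX_std, vY = vY, hessian = TRUE)

Finally, as a sanity check (also mine, not from the original answer), the hand-rolled fit can be compared against R's built-in glm, which maximises the same likelihood via iteratively reweighted least squares:

# glm adds its own intercept, so drop the manual intercept column.
df = data.frame(Y = as.numeric(vY), mX[, -1])
fit_glm = glm(Y ~ ., data = df, family = binomial)
coef(fit_glm)  # should be close to the optimx solution once it converges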