R code for fitting a 3-parameter lognormal model using the correct likelihood

When fitting a three-parameter lognormal model the likelihood may approach infinity (see figure below). This unboundedness of the likelihood occurs when the threshold parameter of the three-parameter lognormal model approaches the smallest observed failure time. A possible remedy that makes the likelihood bounded again is to use the correct likelihood.
For some data sets it is necessary to resort to a remedy such as the correct likelihood. This is because the unboundedness of the likelihood might prevent a numerical optimization algorithm from finding the correct model parameters.

Lognormal 3 parameter correct likelihood

Example of an unbounded likelihood: The likelihood goes to infinity when the threshold parameter (=gamma) approaches the smallest failure time (marked by red line).

The following R code implements the correct likelihood for a 3-parameter lognormal distribution. The code may be used to fit the distribution to (right) censored or complete (uncensored) data in R.

An introduction to the correct likelihood is given by Jerry Lawless in his 2003 book Statistical models and methods for lifetime data (p. 186), and by Meeker and Escobar in their 1998 book Statistical Methods for Reliability Data (pp. 275-283). Liu, Wu, and Meeker also discuss the problem of the unbounded likelihood (and its remedies) in this paper.

Suggestions and/or questions? Please contact Stefan Gelissen (email: info at datall-analyse.nl).
See this page for an overview of all of Stefan’s R code blog posts.

R code


#load the survival package (provides Surv, survreg, and survfit used below)
library(survival)

#Fan data from Meeker & Escobar (1998), Example 11.17, p. 277
#NOTE(review): this Iowa State URL may no longer be reachable — confirm a current mirror
fan <- read.table("http://www.public.iastate.edu/~wqmeeker/anonymous/Stat533_data/Splida_text_data/Fan.txt",
                  header=TRUE)
#failure=1, censored=0 (Status is a factor with levels Censored/Fail)
fan$Cens <- as.numeric(fan$Status)-1
#expand data: Weight holds multiplicities, so repeat each row Weight times
fan2 <- fan[rep(seq_len(nrow(fan)), fan$Weight), ]

times <- fan2$Hours
event <- fan2$Cens

##unbounded likelihood

#profile log-likelihood for the threshold (=gamma):
#shift the times by gamma and fit a 2-parameter lognormal to the positive ones
profile.t <- function(gammahat, times, event) {
  time <- times-gammahat
  fit <- survreg(Surv(time, event) ~ 1, dist="lognormal", subset=(time>0))
  #return the fitted model's log-likelihood
  fit$loglik[1]
}

#maximize the profile likelihood over gamma
#(fnscale=-1 turns optim into a maximizer; the upper bound keeps gamma
#away from the smallest observed failure time)
#note: for an estimate of gamma which coincides with the maximum in the profile likelihood
#plot (see below) you may sometimes have to vary the value of 1e+2 (e.g., set it to 1e+3)
gammaMLE <- optim(par=0, profile.t, method="L-BFGS-B",
                  control=list(fnscale=-1),
                  upper=min(times[event==1])-1e+2,
                  times=times, event=event)
gammaI <- gammaMLE$par
names(gammaI) <- "gamma"

#MLEs for mu and sigma, conditional on the estimated threshold gammaI
sgamma <- Surv(times - gammaI, event)
musigma <- survreg(sgamma ~ 1, dist = "lognormal",
                   subset = (times - gammaI > 0))

muI <- setNames(coef(musigma), "mu")
sigmaI <- setNames(musigma$scale, "sigma")

#summary MLEs of model parameters
c(muI, sigmaI, gammaI)
(loglikw <- musigma$loglik[1])

#profile likelihood plot for gamma (visualizes the unboundedness)

#fixed values of gamma, up to just below the smallest observed failure time
gammas <- seq(-100, min(times[event==1])-1e-2, length.out=100)
#likelihood profile
#vapply instead of sapply: guarantees a numeric vector return type
ll <- vapply(gammas, profile.t, numeric(1), times=times, event=event)

#profile likelihood plot
#note: the likelihood blows up (=unbounded likelihood)
#as gamma approaches smallest observed time
plot(gammas, ll, type='l', xlab=expression(gamma), ylab="log-likelihood")
#MLE of gamma
abline(v=gammaI, lty=2, col="red")
#smallest observed failure time
abline(v=min(times[event==1]), lty=2, col="darkgray")

##correct likelihood

#construct interval data for the correct likelihood
#assume a measurement error of +/- 5, thus delta=5
delta <- 5
#failures get the interval [time-delta, time+delta];
#right-censored rows keep their time as lower endpoint and NA as upper endpoint
#(event is 0/1, so subtracting delta*event shifts failures only)
timesL <- times - delta * event
timesU <- ifelse(event == 1, times + delta, NA)

#profile log-likelihood for threshold (=gamma) under the correct
#(interval-censored) likelihood
profile.tc <- function(gammahat, timesL, timesU) {
  timeLt <- timesL-gammahat
  timeUt <- timesU-gammahat
  fit <- survreg(Surv(timeLt, timeUt, type="interval2") ~ 1,
                 dist="lognormal", subset=(timeLt>0))
  #return the fitted model's log-likelihood
  fit$loglik[1]
}

#maximize over gamma (fnscale=-1 = maximize; upper bound keeps gamma below
#the smallest lower interval endpoint)
#note: for an estimate of gamma which coincides with the maximum in the profile likelihood
#plot (see below) you may sometimes have to vary the value of 1e+2 (e.g., set it to 1e+3)
gammaMLEc <- optim(par=0, profile.tc, method="L-BFGS-B",
                   control=list(fnscale=-1),
                   upper=min(timesL[event==1])-1e+2,
                   timesL=timesL, timesU=timesU)

gammaIc <- gammaMLEc$par
names(gammaIc) <- "gamma"

#MLEs for mu and sigma, conditional on the estimated threshold gammaIc
sgammac <- Surv(timesL - gammaIc, timesU - gammaIc, type = "interval2")
musigmac <- survreg(sgammac ~ 1, dist = "lognormal",
                    subset = (timesL - gammaIc > 0))

muIc <- setNames(coef(musigmac), "mu")
sigmaIc <- setNames(musigmac$scale, "sigma")

#summary MLEs of model parameters
#note: these MLEs are identical to those reported by Meeker & Escobar (p. 277)
c(muIc, sigmaIc, gammaIc)
(loglikw <- musigmac$loglik[1])

#log-likelihood function of 3-p lognormal model (implementing correct likelihood)
#theta = (mu, log(sigma), gamma); yl/yu = lower/upper interval endpoints;
#d = 1 for an (interval-censored) failure, 0 for a right-censored observation
lliklnormal <- function (theta, yl, yu, d) {
  mu <- theta[1]
  sigma <- exp(theta[2])  #log parameterization keeps sigma positive
  gamma <- theta[3]
  #only observations with a positive shifted time (yl > gamma) contribute
  subset <- yl-gamma>0
  yls <- yl[subset]
  yus <- yu[subset]
  ds <- d[subset]
  #failures (d=1) contribute F(yu)-F(yl); censored rows (d=0) contribute 1-F(yl)
  #(x^0 is 1 in R, so the NA upper endpoints of censored rows drop out)
  sum(log(( ( pnorm(((log(yus-gamma)-mu)/sigma)) -
                pnorm(((log(yls-gamma)-mu)/sigma)) ) ^ds)*
            ( (1-pnorm( ((log(yls-gamma)-mu)/sigma) )) ^(1-ds) )))
}

#optimization of log-likelihood
#log transformation of sigma ensures that sigma takes a positive value
#hessian=TRUE: the observed information is needed below for standard errors
lognormal.mle <- optim(c(muIc, log(sigmaIc), gammaIc),
                       lliklnormal,
                       method='BFGS', control=list(fnscale=-1),
                       yl=timesL, yu=timesU, d=event,
                       hessian=TRUE)

#MLEs for mu, sigma, and gamma (sigma transformed back from the log scale)
c(lognormal.mle$par[1], exp(lognormal.mle$par[2]), lognormal.mle$par[3])
#compare with start values
c(muIc, sigmaIc, gammaIc)
#standard errors from the inverse of the observed information matrix
SEparms <- sqrt(diag(solve(-lognormal.mle$hessian)))

#normal-approximation 95% confidence intervals
#these intervals are also known as Fisher matrix interval bounds
z975 <- qnorm(1 - .05/2)
#95% confidence interval for mu
lognormal.mle$par[1] + c(-1, 1) * z975 * SEparms[1]
#95% confidence interval for sigma (back-transformed from the log scale)
exp(lognormal.mle$par[2] + c(-1, 1) * z975 * SEparms[2])
#95% confidence interval for gamma
lognormal.mle$par[3] + c(-1, 1) * z975 * SEparms[3]

#profile likelihood plot for gamma under the correct likelihood

#fixed values of gamma, up to just below the smallest lower interval endpoint
gammas <- seq(-100, min(timesL[event==1])-1e-10, length.out=100)
#likelihood profile
#vapply instead of sapply: guarantees a numeric vector return type
llc <- vapply(gammas, profile.tc, numeric(1), timesL=timesL, timesU=timesU)

#profile likelihood plot
#note: the likelihood doesn't blow up anymore as gamma approaches smallest observed time
plot(gammas, llc, type='l', xlab=expression(gamma), ylab="log-likelihood")
#MLE of gamma
abline(v=gammaIc, lty=2, col="red")
#minimum of observed times
abline(v=min(timesL[event==1]), lty=2, col="darkgray")

#predictions of 3-parameter lognormal model fitted by correct likelihood

#calculate probability plotting positions of observed failures
#sort times and carry the censoring indicator along in the same order
timesSorted <- times[order(times)]
eventSorted <- event[order(times)]
#distinct observed failure times (x-coordinates of the plotting positions)
timesi <- unique(timesSorted[which(eventSorted==1)])

#Kaplan-Meier estimate of the survival function
cumSurvRaw <- survfit(Surv(times, event)~ 1)
#keep one survival value per distinct failure time
#(rep(..., n.event) drops times with zero events, unique collapses ties)
cumSurv <- unique(rep(cumSurvRaw$surv, cumSurvRaw$n.event))
cumFail <- 1-cumSurv
#failure probability at the previous failure time (0 before the first one)
lagFail <- c(0, cumFail[-length(cumFail)])
#midpoint plotting positions: halfway between successive CDF steps
Prob <- .5*(cumFail+lagFail)
#linearize via the standard normal quantile function
yi <- qnorm(Prob)

#prediction grid spanning the observed failure-time range
#(the original call was truncated; 100 points give a smooth curve)
timesPred <- seq(min(times[event==1])-1e-2, max(times[event==1]),
                 length.out=100)
#CDF of the 3-parameter lognormal: F(t) = Phi((log(t - gamma) - mu) / sigma)
pred3p <- function(timesPred, mu, sigma, gamma) {
  z <- (log(timesPred - gamma) - mu) / sigma
  pnorm(z)
}

#pred3p is built from vectorized primitives (log, pnorm), so call it directly
#on the whole grid instead of sapply-ing element by element (same result)
predProb <- pred3p(timesPred, mu=muIc, sigma=sigmaIc, gamma=gammaIc)
#NOTE(review): `pi` shadows the base constant; kept for compatibility with the plot below
pi <- qnorm(predProb)

#probability plot of the observed failures with the fitted model overlaid
plot(timesi, yi, log = "x", xlim = c(200, 10000),
     xlab = "Time", ylab = "Linearized CDF")
lines(timesPred, pi, col = "blue")
#include threshold (=gamma)
abline(v = gammaIc, col = "red", lty = 2)