Ah, perfect! Thanks so much, Ken.

In the meantime I played with developing an optim()-driven maximum-likelihood
search (included below for posterity), but the glm() + mafc approach is
faster and apparently yields identical results.

########
# First generate some data
########

# Define a modified logistic function
mod.logistic = function(x,location,scale) plogis(x,location,scale)*.5 + .5

# Define the scale and location of the modified logistic to be fit
location = 40
scale = 5
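
# A quick, optional sanity check (an addition, not in the original post):
# the modified logistic should sit at chance (0.5) for very small x, pass
# through 0.75 at x = location, and approach 1 for very large x.
mod.logistic(c(-1e6, location, 1e6), location, scale)
# expected: roughly 0.50 0.75 1.00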

# Choose some x values
x = runif(200,0,100)

# Generate some random noise to add to x in order to
# simulate real-world measurement and avoid perfect fits
x.noise = runif(length(x),-10,10)

# Define the probability of success for each x given the modified logistic
prob.success = mod.logistic(x+x.noise,location,scale)

# Obtain y, the observed success/failure at each x
y = rep(NA,length(x))
for(i in 1:length(x)){
    y[i] = sample(
        x = c(1,0)
        , size = 1
        , prob = c(prob.success[i], 1-prob.success[i])
    )
}
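
# Optional check (an addition for illustration): overall accuracy in the
# simulated responses should be close to the mean generating probability.
mean(y)
mean(prob.success)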

# Show the data and the source function
plot(x,y,ylab='Accuracy',xlab='Stimulus Duration (ms)',pch=3)
abline(h=.5,lty=3,col='grey',lwd=2)
curve(mod.logistic(x,location,scale),add=T)

########
# Now try to fit the data
########

# Fit using glm() with the standard binomial link function and plot the
# result, rescaled to the 2AFC range (obviously a bad fit, since this
# model ignores the 0.5 lower asymptote)
glm.binom.fit = glm(y ~ x , family='binomial')
curve(plogis(glm.binom.fit$coefficients[1]+x*glm.binom.fit$coefficients[2])*.5+.5,add=T,col='brown',lty=2)
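
# Illustrative check (an addition, not in the original post): the standard
# binomial link does not respect the 2AFC chance floor, so its fitted
# accuracies will typically dip below 0.5 at the shortest durations here.
min(fitted(glm.binom.fit))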


# Define a function that computes the summed log likelihood of the
# data given a set of mod.logistic parameters (par = c(location, scale))
lik.mod.logistic = function(par, y, x){
    # Predicted probability of success at each x
    p = mod.logistic(x, par[1], par[2])
    # Likelihood of each observed response
    lik = ifelse(y == 1, p, 1 - p)
    sl = sum(log(lik))
    # Guard against non-finite values so optim() doesn't choke
    if(!is.finite(sl)){
        sl = -99999999
    }
    return(sl)
}
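
# Quick sanity check (an illustrative addition): the log likelihood at the
# generating parameters should generally exceed that at a clearly wrong guess.
lik.mod.logistic(c(40, 5), y = y, x = x)
lik.mod.logistic(c(80, 20), y = y, x = x)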

# Fit the data using optim() whilst keeping a record of how
# much time this takes. Starting points may need to be
# modified, as may the box constraints.
optim.start = proc.time()[1]
from.optim = optim(
    par = c(
        location = sum(x*y)/sum(y)
        , scale = sum(x*y)/sum(y)
    )
    , fn = lik.mod.logistic
    , method = 'L-BFGS-B'
    , lower = c(1,1)
    , upper = c(1000,1000)
    , y = y
    , x = x
    , control = list(fnscale = -1) # negative fnscale: maximize the log likelihood
)
optim.time=proc.time()[1]-optim.start
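
# Inspect the optim() solution (an illustrative addition): the estimates
# should land near the generating values of 40 and 5, with convergence 0.
from.optim$par
from.optim$convergence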

# Add the fit to the plot
curve(mod.logistic(x,from.optim$par[1],from.optim$par[2]),col='red',lty=2,lwd=2,add=T)


# Fit the data using glm() and the mafc.logit link function
# provided in the psyphy package.
library(psyphy)
glm.mafc.logit.start = proc.time()[1]
glm.mafc.logit.fit = glm(y ~ x, family = binomial(mafc.logit(2)))
glm.mafc.logit.time = proc.time()[1] - glm.mafc.logit.start

# Add the fit to the plot
x.ord <- order(x)
lines(x[x.ord], fitted(glm.mafc.logit.fit)[x.ord], col = "green", lty = 3, lwd = 2)

# Compare the time taken by the optim() and
# glm(y ~ x, binomial(mafc.logit(2))) procedures
print(c(optim.time, glm.mafc.logit.time))
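
# Compare the parameter estimates themselves (an illustrative addition, not
# in the original post). With the mafc.logit(2) link, accuracy is modelled
# as 0.5 + 0.5*plogis(b0 + b1*x), so location = -b0/b1 and scale = 1/b1
# should closely match the optim() estimates.
b0 = unname(coef(glm.mafc.logit.fit)[1])
b1 = unname(coef(glm.mafc.logit.fit)[2])
print(from.optim$par)
print(c(location = -b0/b1, scale = 1/b1))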


-- 
Mike Lawrence
Graduate Student
Department of Psychology
Dalhousie University
www.thatmike.com

Looking to arrange a meeting? Check my public calendar and find a time that
is mutually free:
http://tinyurl.com/mikescalendar
(tiny url redirects to google calendar)

~ Certainty is folly... I think. ~
