HW 5


3.3.1 and 3.3.2; they are a bit hard, so let's have them due next Thursday. Feel free to consult with me throughout the week.

ANSWERS:

1.

dat=c(33,17,8,42,40,10,30,20)

#negative log likelihood of the high-threshold model, 1 condition
#par=c(d,g)
#y=c(h,m,f,c)
nll.condition=function(par,y)
{
d=par[1]
g=par[2]
p=1:4 # reserve space
p[1]=d+(1-d)*g #probability of a hit
p[2]=1-p[1] # probability of a miss
p[3]=g # probability of a false alarm
p[4] = 1-p[3] #probability of a correct rejection
return(-sum(y*log(p)))
}
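
As a quick sanity check of nll.condition (the parameter values here are just illustrative, not estimates):

#with d=.5 and g=.2 the hit probability is .5+(1-.5)*.2=.6
nll.condition(c(.5,.2),dat[1:4]) #evaluate on the condition-1 counts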

#Model 1, free d and free g
#negative log likelihood for Model 1:
#assign par4=d1,g1,d2,g2
#assign y8=(h1,m1,f1,c1,h2,m2,f2,c2)
nll.1=function(par4,y8)
{
nll.condition(par4[1:2],y8[1:4])+ #condition 1
nll.condition(par4[3:4],y8[5:8]) #condition 2
}

#negative log likelihood for Model 2:
#assign par3=d,g1,g2
#assign y8=(h1,m1,f1,c1,h2,m2,f2,c2)
nll.2=function(par3,y8)
{
nll.condition(par3[1:2],y8[1:4])+
nll.condition(par3[c(1,3)],y8[5:8])
}

#negative log likelihood for Model 3:
#par3=d1,d2,g
#y8=(h1,m1,f1,c1,h2,m2,f2,c2)
nll.3=function(par3,y8)
{
nll.condition(par3[c(1,3)],y8[1:4])+
nll.condition(par3[2:3],y8[5:8])
}

#Model 1
par=c(.5,.5,.5,.5) #starting values
mod1=optim(par,nll.1,y8=dat,hessian=T)
#Model 2
par=c(.5,.5,.5) #starting values
mod2=optim(par,nll.2,y8=dat,hessian=T)
#Model 3
par=c(.5,.5,.5) #starting values
mod3=optim(par,nll.3,y8=dat,hessian=T)
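
Because optim was called with hessian=T, we can also inspect the fits a bit. Approximate asymptotic standard errors come from the inverse Hessian of the negative log likelihood; this step is optional and only sensible if the Hessian is well behaved:

mod1$convergence #0 means optim reports successful convergence
mod1$par #estimates of d1,g1,d2,g2
sqrt(diag(solve(mod1$hessian))) #approximate standard errors from the inverse Hessian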

#likelihood-ratio test of constant detection: Model 2 (common d) vs. Model 1
-2*(mod1$value-mod2$value) #G^2 for invariance of detection across conditions

#likelihood-ratio test of constant guessing: Model 3 (common g) vs. Model 1
-2*(mod1$value-mod3$value) #G^2 for an effect of condition on guessing
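
Each model object's $value is the minimized negative log likelihood, so these differences come out nonnegative. If p-values are wanted, the usual asymptotic result is to compare each statistic to a chi-square distribution with one degree of freedom (Model 1 has 4 free parameters, Models 2 and 3 have 3). This bit is optional; the G2.det and G2.guess names are just for the snippet:

G2.det=-2*(mod1$value-mod2$value)
1-pchisq(G2.det,df=1) #p-value for the constant-detection restriction

G2.guess=-2*(mod1$value-mod3$value)
1-pchisq(G2.guess,df=1) #p-value for the constant-guessing restriction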

######################################################

2.

Let's generate a single data set from true parameter values under Model 2:

pred.condition=function(par)
#input parameters d and g
#output 4 probabilities (h,m,f,c)
{
d=par[1]
g=par[2]
p=1:4 # reserve space
p[1]=d+(1-d)*g #probability of a hit
p[2]=1-p[1] # probability of a miss
p[3]=g # probability of a false alarm
p[4] = 1-p[3] #probability of a correct rejection
return(p)
}

#simulate frequencies (h,m,f,c) for one condition: N signal trials and N noise trials
makedat.condition=function(par,N)
{
p=pred.condition(par)
y=1:4
y[1]=rbinom(1,N,p[1])
y[2]=N-y[1]
y[3]=rbinom(1,N,p[3])
y[4]=N-y[3]
return(y)
}

par=c(.3,.64,.04) #true values for Model 2: d=.3, g1=.64, g2=.04

#simulate data for both conditions under Model 2 (common d, separate guessing rates)
makedat.2=function(par3,N)
{
c(makedat.condition(par3[1:2],N),makedat.condition(par3[c(1,3)],N))
}

y8=makedat.2(par,50)
out=optim(par,nll.2,y8=y8)
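
It's worth comparing the recovered estimates to the generating values; with N=50 per condition the agreement will only be rough:

rbind(true=par,estimate=out$par) #generating values vs. recovered estimates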

#let's make a big loop
M=10000
d=1:M #storage for the estimate of d on each replicate
for (r in 1:M)
{
y8=makedat.2(par,50) #simulate data from the true values
parameters=optim(par,nll.2,y8=y8)$par #refit Model 2
d[r]=parameters[1] #keep the estimate of d
print(r)
}

hist(d)
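
A few summary numbers for the sampling distribution of the estimate of d go nicely with the histogram (optional):

mean(d) #should land near the true d=.3
sd(d) #sampling variability of the estimate
quantile(d,c(.025,.975)) #interval containing 95% of the estimates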

dat=c(40,10,30,20,15,35,2,48)
par=c(.5,.5,.5,.5) #starting values
mod1=optim(par,nll.1,y8=dat,hessian=T)
par=c(.5,.5,.5) #starting values
mod2=optim(par,nll.2,y8=dat,hessian=T)

par=mod2$par #use the Model 2 estimates as the generating values (parametric bootstrap)

#let's make a big loop
M=1000
d=1:M #storage for the estimate of d on each replicate
for (r in 1:M)
{
y8=makedat.2(par,50)
parameters=optim(par,nll.2,y8=y8)$par
d[r]=parameters[1]
print(r)
}

hist(d)
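
Since the generating values here are the Model 2 estimates from the data, the spread of these d values works as a parametric-bootstrap summary of uncertainty; a percentile interval is one simple way to report it:

mean(d) #center of the bootstrap distribution
quantile(d,c(.025,.975)) #approximate 95 percent percentile interval for d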

######################################
Problem 3.3.2 REDO

M=1000
flag1=0 #counts samples with more false alarms than hits, condition 1
flag2=0 #counts samples with more false alarms than hits, condition 2
d=matrix(ncol=2,nrow=M) #one column of d estimates per condition
for (r in 1:M)
{
y=makedat.2(par,50)
if (y[1] < y[3]) flag1=flag1+1
if (y[5] < y[7]) flag2=flag2+1
h1=y[1]/(y[1]+y[2]) #observed hit rate, condition 1
f1=y[3]/(y[3]+y[4]) #observed false-alarm rate, condition 1
d[r,1]=(h1-f1)/(1-f1) #high-threshold estimate of d, condition 1
h2=y[5]/(y[5]+y[6]) #observed hit rate, condition 2
f2=y[7]/(y[7]+y[8]) #observed false-alarm rate, condition 2
d[r,2]=(h2-f2)/(1-f2) #high-threshold estimate of d, condition 2
print(r)
}
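
After the loop, the flags and the columns of d summarize how the (h-f)/(1-f) estimator behaves, for example:

flag1/M #proportion of samples with more false alarms than hits, condition 1
flag2/M #proportion of samples with more false alarms than hits, condition 2
colMeans(d) #average estimate of d in each condition
hist(d[,2]) #distribution of the condition-2 estimates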

Research Methods 3020