## Appendix 1: R CODE TO FIT THE MAIN MODEL AND THE MODEL WITH A MODIFIED PRIOR FOR THE BETWEEN-STUDY VARIANCE

library(Brobdingnag)  ## to handle extreme numbers

##### parameters:
## K: number of iterations
## x: observed effect sizes
## s2: the corresponding variances
## M1: starting value for the mean effect size
## alpha1: starting values for the study-specific means
## tau1: starting value for the between-study variance
## v11, v21, v31: starting values for the parameters of the weight function
## sm: selection mechanism
#    sm="pos"    when RR = P(including statistically significant positive outcomes) / P(including other outcomes)
#    sm="neg"    when RR = P(including statistically significant negative outcomes) / P(including other outcomes)
#    sm="2sided" when RR = P(including statistically significant outcomes) / P(including other outcomes)
## tau_prior: prior distribution for the between-study variance
#    tau_prior="gelman": a uniform prior for the between-study standard deviation
#    tau_prior="invtau": a 1/between-study variance prior for the between-study variance

##### R function:
pr <- function(K, x, s2, M1, alpha1, tau1, v11, v21, v31, sm, tau_prior) {

  ## parameters of the prior distribution (normal with mean a and variance b) of the mean effect size
  a <- 0
  b <- 1000

  ## chains
  M <- M1
  alpha <- rbind(alpha1)
  tau <- tau1
  v1 <- v11
  v2 <- v21
  v3 <- v31

  ## parameters needed for the Gibbs algorithm
  I <- length(x)
  Ib <- I * b
  nps <- sum(x - 1.96 * s2^0.5 >= 0)  ## number of statistically significant positive outcomes
  nns <- sum(x + 1.96 * s2^0.5 <= 0)  ## number of statistically significant negative outcomes
  nnots <- I - (nps + nns)            ## number of other (non-significant) outcomes

  ## functions needed for the Gibbs algorithm
  ## normalising constant of the weighted density for one study
  fconst <- function(v1, v2, v3, alphai, s2i) {
    integral1 <- 1 - pnorm(s2i^0.5 * 1.96, mean = alphai, sd = s2i^0.5)  ## P(significant positive outcome)
    integral3 <- pnorm(-s2i^0.5 * 1.96, mean = alphai, sd = s2i^0.5)     ## P(significant negative outcome)
    integral2 <- 1 - (integral1 + integral3)                             ## P(other outcome)
    v1 * integral1 + v2 * integral2 + v3 * integral3
  }

  ## full conditional distribution for alpha[i]
  fcalphai <- function(alphai, M, tau, v1, v2, v3, s2i, xi) {
    mean <- xi * tau / (tau + s2i) + M * (s2i / (tau + s2i))
    var <- (tau * s2i) / (tau + s2i)
    (fconst(v1, v2, v3, alphai, s2i)^-1) * (1 / var^0.5) * exp(((alphai - mean)^2) / (-2 * var))
  }

  ## full conditional distribution for alpha[i] (for extreme numbers)
  fcalphai_exnum <- function(alphai, M, tau, v1, v2, v3, s2i, xi) {
    mean <- xi * tau / (tau + s2i) + M * (s2i / (tau + s2i))
    var <- (tau * s2i) / (tau + s2i)
    (fconst(v1, v2, v3, alphai, s2i)^-1) * (1 / var^0.5) * exp(((alphai - mean)^2) / (-2 * as.brob(var)))
  }

  ## Gibbs sampler
  for (k in 1:K) {

    ## study-specific effect sizes
    alpha_new <- rep(NA, I)
    for (i in 1:I) {
      ## Metropolis step to sample alpha
      alphai_prop <- rnorm(1, alpha[k, i], s2[i]^0.5)
      cand <- fcalphai(alphai = alphai_prop, M[k], tau[k], v1[k], v2[k], v3[k], s2i = s2[i], xi = x[i])
      prev <- fcalphai(alpha[k, i], M[k], tau[k], v1[k], v2[k], v3[k], s2[i], x[i])
      if (cand != 0 & prev != 0 & is.nan(prev) == FALSE & is.nan(cand) == FALSE) {
        Beta <- min(cand / prev, 1)
      }
      if (cand == 0 | prev == 0 | is.nan(prev) == TRUE | is.nan(cand) == TRUE) {  ## to deal with extreme numbers
        cand <- fcalphai_exnum(alphai_prop, M[k], tau[k], v1[k], v2[k], v3[k], s2i = s2[i], xi = x[i])
        prev <- fcalphai_exnum(alpha[k, i], M[k], tau[k], v1[k], v2[k], v3[k], s2[i], x[i])
        Beta <- min(as.numeric(cand / prev), 1)
      }
      accept <- sample(c(1, 0), size = 1, prob = c(Beta, 1 - Beta))
      if (accept == 1) { alpha_new[i] <- alphai_prop }
      if (accept == 0) { alpha_new[i] <- alpha[k, i] }
    }
    alpha <- rbind(alpha, alpha_new)

    ## mean effect size
    M[k + 1] <- rnorm(1,
                      mean = mean(alpha[k + 1, ]) * (Ib / (tau[k] + Ib)) + a * (tau[k] / (tau[k] + Ib)),
                      sd = ((tau[k] * b) / (tau[k] + Ib))^0.5)

    ## between-study variance
    if (tau_prior == "gelman") {
      invtau <- rgamma(1, shape = (I - 1) / 2, scale = 1 / (0.5 * sum((alpha[k + 1, ] - M[k + 1])^2)))
      tau[k + 1] <- 1 / invtau
    }
    if (tau_prior == "invtau") {
      invtau <- rgamma(1, shape = 0.5 * I, scale = 1 / (0.5 * sum((alpha[k + 1, ] - M[k + 1])^2)))
      tau[k + 1] <- 1 / invtau
    }

    ## parameters of the weight function: Metropolis step
    vc <- runif(2)
    if (sm == "pos") {
      v1c <- vc[1]
      v2c <- vc[2]
      v3c <- v2c
    }
    if (sm == "neg") {
      v1c <- vc[1]
      v2c <- v1c
      v3c <- vc[2]
    }
    if (sm == "2sided") {
      v1c <- vc[1]
      v2c <- vc[2]
      v3c <- v1c
    }
    den1 <- rep(NA, I)
    den2 <- rep(NA, I)
    for (i in 1:I) {
      den1[i] <- fconst(v1c, v2c, v3c, alphai = alpha[k + 1, i], s2i = s2[i])
      den2[i] <- fconst(v1[k], v2[k], v3[k], alphai = alpha[k + 1, i], s2i = s2[i])
    }
    cand <- (v1c^nps * v2c^nnots * v3c^nns) / prod(den1)
    prev <- (v1[k]^nps * v2[k]^nnots * v3[k]^nns) / prod(den2)
    if (is.na(cand) == TRUE | is.na(prev) == TRUE | cand == 0 | prev == 0) {  ## to deal with extreme numbers
      cand <- (as.brob(v1c)^nps * as.brob(v2c)^nnots * as.brob(v3c)^nns) / prod(as.brob(den1))
      prev <- (as.brob(v1[k])^nps * as.brob(v2[k])^nnots * as.brob(v3[k])^nns) / prod(as.brob(den2))
    }
    if (is.brob(cand) == FALSE) {
      Beta <- min(cand / prev, 1)
    }
    if (is.brob(cand) == TRUE) {
      Beta <- min(as.numeric(cand / prev), 1)  ## the probability of accepting the candidate value
    }
    accept <- sample(c(1, 0), size = 1, prob = c(Beta, 1 - Beta))
    if (accept == 1) {
      v1[k + 1] <- v1c
      v2[k + 1] <- v2c
      v3[k + 1] <- v3c
    }
    if (accept == 0) {
      v1[k + 1] <- v1[k]
      v2[k + 1] <- v2[k]
      v3[k + 1] <- v3[k]
    }
  }

  ## relative publication probability RR
  if (sm == "pos" | sm == "2sided") { rr <- v1 / v2 }
  if (sm == "neg") { rr <- v3 / v2 }

  list(alpha = alpha, M = M, tau = tau, v1 = v1, v2 = v2, v3 = v3, rr = rr)
}

##### Examples:

## Function to generate data
## N: number of published studies, m: true mean effect, tau: between-study variance,
## pb_rr: how many times more likely a significant positive result is to be published than any other result
datsam <- function(N, m, tau, pb_rr) {
  ests <- rep(NA, N)
  s2 <- rep(NA, N)
  i <- 1
  while (i <= N) {
    sd <- rlnorm(1, -2, 2)^0.5                 ## within-study standard deviation
    meani <- rnorm(1, mean = m, sd = tau^0.5)  ## study-specific true effect
    est <- rnorm(1, mean = meani, sd = sd)     ## observed effect size
    if (est < 1.96 * sd) { publication <- sample(x = c(1, 0), size = 1, prob = c(1 / pb_rr, 1 - 1 / pb_rr)) }
    if (est >= 1.96 * sd) { publication <- 1 }
    if (publication == 1) {
      ests[i] <- est
      s2[i] <- sd^2
      i <- i + 1
    }
  }
  list(x = ests, s2 = s2)
}

## Example 1: strong selection
sample <- datsam(N = 50, m = 0.5, tau = 0.1, pb_rr = 10)
chains <- pr(K = 1000, x = sample$x, s2 = sample$s2, M1 = mean(sample$x),
             alpha1 = rnorm(length(sample$x), 0, 1), tau1 = 0.1,
             v11 = 1, v21 = 1, v31 = 1, sm = "pos", tau_prior = "gelman")
plot(chains$rr)

## Example 2: no selection
sample <- datsam(N = 50, m = 0.5, tau = 0.1, pb_rr = 1)
chains <- pr(K = 1000, x = sample$x, s2 = sample$s2, M1 = 0,
             alpha1 = rnorm(length(sample$x), 0, 1), tau1 = 0.01,
             v11 = 1, v21 = 1, v31 = 1, sm = "pos", tau_prior = "gelman")
plot(chains$rr)
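The two examples above plot the raw chain for RR, including its early iterations, and both use tau_prior="gelman". The following is a minimal post-processing sketch, not part of the published code: the 200-iteration burn-in is an arbitrary choice, and the objects burnin, keep, summ and chains_inv are illustrative names. It assumes the sample and chains objects from whichever example was run most recently are still in the workspace, summarises the draws after discarding the burn-in, and refits the same simulated data with the modified prior for the between-study variance by setting tau_prior="invtau".

## Illustrative post-processing (assumed burn-in of 200 iterations; adjust as needed)
burnin <- 200
keep <- (burnin + 1):length(chains$M)

## posterior median and 95% credible interval
summ <- function(draws) c(median = median(draws), quantile(draws, probs = c(0.025, 0.975)))
summ(chains$M[keep])    ## mean effect size
summ(chains$tau[keep])  ## between-study variance
summ(chains$rr[keep])   ## relative publication probability RR

## refit the same simulated data with the modified prior for the between-study variance
chains_inv <- pr(K = 1000, x = sample$x, s2 = sample$s2, M1 = mean(sample$x),
                 alpha1 = rnorm(length(sample$x), 0, 1), tau1 = 0.1,
                 v11 = 1, v21 = 1, v31 = 1, sm = "pos", tau_prior = "invtau")
summ(chains_inv$rr[keep])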