/* OLS IS BLUE GIVEN A1. Linear model, A2. Homoskedastic errors */

new;
cls;
library pgraph;

/* Below we show that OLS is unbiased and more efficient than an
   alternative linear unbiased estimator.
   TRUE MODEL: Y = X*BETA + E,  X ~ N(0,1),  E ~ N(0,1)            */

n = 100;                    // sample size for each random sample
x = rndn(n,1);              // X is fixed in repeated samples
beta = 1;                   // true slope
b = {};                     // OLS estimates, one per replication
bx = {};                    // extreme-point estimates, one per replication

for i (1, 1000, 1);         // this creates 1000 random samples
    y = x*beta + rndn(n,1);     // only E changes for each sample; X stays fixed

    // Extreme-point estimator: slope through the observations
    // with the smallest and largest x
    c = sortc(y~x, 2);          // sort the (y,x) pairs by x
    c1 = c[1,.];                // observation with the smallest x
    cn = c[n,.];                // observation with the largest x
    c = cn - c1;
    bx = bx | (c[1]/c[2]);      // (y_max - y_min)/(x_max - x_min)

    // OLS estimator
    b = b | inv(x'x)*x'y;
endfor;

"MONTE CARLO SIMULATION";
"OLS is BLUE";
"-----------------------------------";
"Mean of OLS ESTIMATOR";; meanc(b);
"Mean of EXTREME POINT ESTIMATOR";; meanc(bx);
"True Beta";; beta;
"";
"Variance of OLS ESTIMATOR";; (stdc(b))^2;
"Variance of EXTREME POINT ESTIMATOR";; (stdc(bx))^2;
"-------------------------------------";

// Activate below to see the distribution of the estimators
call hist(b, 50);
// call hist(bx, 50);
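
/* Optional check (a minimal sketch, not part of the original script):
   under the model above, the variance of the OLS slope conditional on x is
   sigma_e^2 * inv(x'x), and sigma_e^2 = 1 here, so the simulated variance
   of b reported above should be close to inv(x'x) for the fixed x draw.  */
"Theoretical variance of OLS ESTIMATOR, inv(x'x)";; inv(x'x);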