## Simulate training and test sets: two standard-normal features, with the
## class probability driven by the first feature only through a logistic link,
## P(y = 1 | x) = 1 / (1 + exp(-(1 + 3 * x[, 1]))).
set.seed(1)  # make the example reproducible
n <- 1000
x <- matrix(rnorm(n * 2), n, 2)
eta <- 1 + 3 * x[, 1]  # linear predictor (renamed from 'c' to avoid shadowing base::c)
y <- rbinom(n, 1, 1 / (1 + exp(-eta)))
xtest <- matrix(rnorm(n * 2), n, 2)
eta_test <- 1 + 3 * xtest[, 1]
ytest <- rbinom(n, 1, 1 / (1 + exp(-eta_test)))
## Fit a Neyman-Pearson classifier with an SVM base learner and the default
## type I error control (alpha = 0.05).  Requires the 'nproc' package.
fit <- npc(x, y, method = "svm")
pred <- predict(fit, xtest)
fit.score <- predict(fit, x)  # scores on the training set; reused by the 'custom' example below
accuracy <- mean(pred$pred.label == ytest)
cat("Overall Accuracy: ", accuracy, "\n")
## Empirical type I error = misclassification rate among the true class-0 cases,
## which the NP classifier is designed to keep below alpha with high probability.
ind0 <- which(ytest == 0)
typeI <- mean(pred$pred.label[ind0] != ytest[ind0])  # type I error on test set
cat("Type I error: ", typeI, "\n")
## Refit with a logistic-regression base learner and a looser type I error
## bound (alpha = 0.1); larger alpha generally trades type I error for power.
fit <- npc(x, y, method = "logistic", alpha = 0.1)
pred <- predict(fit, xtest)
accuracy <- mean(pred$pred.label == ytest)
cat("Overall Accuracy: ", accuracy, "\n")
ind0 <- which(ytest == 0)
typeI <- mean(pred$pred.label[ind0] != ytest[ind0])  # type I error on test set
cat("Type I error: ", typeI, "\n")
##Now, change the method to adaboost
#fit = npc(x, y, method = 'ada', alpha = 0.1)
#pred = predict(fit,xtest)
#accuracy = mean(pred$pred.label==ytest)
#cat('Overall Accuracy: ', accuracy, '\n')
#ind0 = which(ytest==0)
#typeI = mean(pred$pred.label[ind0]!=ytest[ind0]) #type I error on test set
#cat('Type I error: ', typeI, '\n')
##A 'custom' npc classifier with y and score.
#fit2 = npc(y = y, score = fit.score$pred.score,
#pred.score = pred$pred.score, method = 'custom')
## (Web-page footer from the documentation site, kept as a comment so the
## script remains valid R: "Run the code above in your browser using DataLab")