# NOT RUN (scraped example page; the code below requires the regtools
# package, its datasets, and a working keras installation):
library(keras)

# Regression example: predict wage income from the census dummy data.
data(peDumms)
ped <- peDumms[, c(1, 20, 22:27, 29, 32, 31)]

# Column 11 holds the wage-income target; all other columns are predictors.
x <- ped[, -11]
y <- ped[, 11]

# Fit a network with three hidden layers of 50 units each.
wageFit <- krsFit(x, y, c(50, 50, 50), classif = FALSE, nEpoch = 25)
wagePreds <- predict(wageFit, x)
mean(abs(wagePreds - y))  # mean absolute error; something like 25000
# Classification example: predict occupation, coded as dummy variables
# in columns 4-8 of ped (defined above).
x <- ped[, -(4:8)]
y <- ped[, 4:8]
# Collapse the 6 dummy columns to integer class labels 0..5.
y <- dummiesToInt(y, FALSE) - 1
# NOTE(review): the 0.20 entry in the hidden spec presumably requests a
# dropout layer between the dense layers — confirm against the krsFit docs.
occFit <- krsFit(x, y, c(50, 50, 0.20, 50), classif = TRUE, nEpoch = 175, nClass = 6)
occPreds <- predict(occFit, x)
mean(occPreds == y)  # proportion correctly classified; something like 0.39
# Obtain MNIST training and test sets; the following then uses the
# example network of
# https://databricks-prod-cloudfront.cloud.databricks.com/
# public/4027ec902e239c93eaaa8714f173bcfc/2961012104553482/
# 4462572393058129/1806228006848429/latest.html
# converted to use the krsFit wrapper.
x <- mntrn[, -785] / 255   # 784 pixel columns, scaled to [0, 1]
y <- mntrn[, 785]          # digit labels in the final column
xShape <- c(28, 28)        # each row is a flattened 28x28 image
# Define convolutional layers.
conv1 <- list(type = 'conv2d', filters = 32, kern = 3)
conv2 <- list(type = 'pool', kern = 2)
conv3 <- list(type = 'conv2d', filters = 64, kern = 3)
conv4 <- list(type = 'pool', kern = 2)
conv5 <- list(type = 'drop', drop = 0.5)
# Call wrapper: 1 dense hidden layer of 128 units, then a dropout layer
# with proportion 0.5.  Fixes vs. the original: the hidden spec is now
# passed by name (it previously followed the named conv= argument
# positionally, silently filling the next unmatched formal), and the
# xShape variable defined above is actually used (it was previously dead,
# with the literal c(28,28) repeated in the call).
z <- krsFit(x, y, hidden = c(128, 0.5),
   conv = list(conv1, conv2, conv3, conv4, conv5),
   classif = TRUE, nClass = 10, nEpoch = 10, xShape = xShape,
   scaleX = FALSE, scaleY = FALSE)
# Try on the test set.
preds <- predict(z, mntst[, -785] / 255)
mean(preds == mntst[, 785])  # proportion correct; 0.98 in my sample run
# end NOT RUN
# (RDocumentation page footer, not R code:) Run the code above in your browser using DataLab