10.3 Lab: A Single Layer Network on the Hitters Data
How to perform deep learning in RStudio: fitting a single layer network on the Hitters data.
library(ISLR2)
Gitters <- na.omit(Hitters)
n <- nrow(Gitters)

set.seed(13)
ntest <- trunc(n / 3)  # trunc() rounds down to an integer
testid <- sample(1:n, ntest)
Split the data with testid, fit a linear model on the training set, and compute the test-set mean absolute error:
training <- Gitters[-testid, ]
testing <- Gitters[testid, ]

lfit <- lm(Salary ~ ., data = training)
lpred <- predict(lfit, testing)

pred_test <- cbind(testing, lpred)
mean(abs(pred_test$lpred - pred_test$Salary))
## [1] 254.6687
Standardize the model matrix and fit the lasso using glmnet, choosing lambda by cross-validation:
x <- scale(model.matrix(Salary ~ . - 1, data = Gitters))
y <- Gitters$Salary
library(glmnet)
cvfit <- cv.glmnet(x[-testid, ], y[-testid], type.measure = "mae")
cpred <- predict(cvfit, x[testid, ], s = "lambda.min")
mean(abs(y[testid] - cpred))
## [1] 252.2994
There are two ways to fit the neural network:

- using {keras}, which requires a Python/TensorFlow backend
- using {torch}, which runs natively in R

Fit the neural network with {keras}. The main functions are:

- keras_model_sequential()
- layer_dense()
- layer_dropout()
Keras requires some one-time installation steps in RStudio:
library(ISLR2)
tryCatch(
remove.packages(c("keras", "tensorflow", "reticulate")),
error = function(e) "Some or all packages not previously installed, that's ok!"
)
install.packages("keras", repos = 'https://cloud.r-project.org')
write('RETICULATE_AUTOCONFIGURE=FALSE', file = "~/.Renviron", append = TRUE)
write(sprintf('RETICULATE_MINICONDA_PATH=%s',
normalizePath("~/islr-miniconda", winslash = "/", mustWork = FALSE)),
file = "~/.Renviron", append = TRUE)
# restart R
source(system.file("helpers", "install.R", package = "ISLR2"))
reticulate::install_miniconda(force = TRUE)
tensorflow::install_tensorflow()
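After restarting R, a quick sanity check (my addition, not part of the original lab) confirms that the Python backend is visible to the session:

reticulate::py_config()   # shows which Python / miniconda environment is in use
tensorflow::tf_version()  # reports the installed TensorFlow version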
library(tidyverse)
library(keras)
library(tensorflow)
modnn <- keras_model_sequential() %>%
  layer_dense(units = 50, activation = "relu",
              input_shape = ncol(x)) %>%
  layer_dropout(rate = 0.4) %>%
  layer_dense(units = 1)
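Defining the model is not enough: it still has to be compiled and trained. The chunk below is a sketch following the usual keras workflow for this lab; the epoch and batch-size values are illustrative choices, not tuned results:

modnn %>% compile(
  loss = "mse",
  optimizer = optimizer_rmsprop(),
  metrics = list("mean_absolute_error")
)

history <- modnn %>% fit(
  x[-testid, ], y[-testid],
  epochs = 600, batch_size = 32,                   # illustrative values
  validation_data = list(x[testid, ], y[testid])
)

npred <- predict(modnn, x[testid, ])               # predictions on the held-out rows
mean(abs(y[testid] - npred))                       # test MAE, comparable to lm and lasso above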
The second way is to use {torch}:
library(torch)
library(luz) # high-level interface for torch
library(torchvision) # for datasets and image transformation
library(torchdatasets) # for datasets we are going to use
library(zeallot)
torch_manual_seed(13)
modnn <- nn_module(
  initialize = function(input_size) {
    self$hidden <- nn_linear(input_size, 50)
    self$activation <- nn_relu()
    self$dropout <- nn_dropout(0.4)
    self$output <- nn_linear(50, 1)
  },
  forward = function(x) {
    x %>%
      self$hidden() %>%
      self$activation() %>%
      self$dropout() %>%
      self$output()
  }
)
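To train the module, {luz} provides the high-level loop: setup() attaches the loss, optimizer and metric, set_hparams() forwards input_size to initialize(), and fit() runs the epochs. The chunk below is a sketch of that pattern, assuming x, y and testid from earlier in the lab; the epoch count is an illustrative choice:

fitted <- modnn %>%
  setup(
    loss = nn_mse_loss(),
    optimizer = optim_rmsprop,
    metrics = list(luz_metric_mae())
  ) %>%
  set_hparams(input_size = ncol(x)) %>%            # passed on to initialize()
  fit(
    data = list(x[-testid, ], matrix(y[-testid], ncol = 1)),
    valid_data = list(x[testid, ], matrix(y[testid], ncol = 1)),
    epochs = 50                                    # illustrative value
  )

npred <- predict(fitted, x[testid, ])
mean(abs(y[testid] - as.matrix(npred)))            # test MAE on the same held-out rows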