# \donttest{
if (requireNamespace("keras3", quietly = TRUE)) {
  library(keras3)
  library(parsnip)

  # Step 1: Define the block functions that serve as the model's
  # building blocks.

  # Input block: receives the data's shape automatically.
  input_block <- function(input_shape) {
    layer_input(shape = input_shape)
  }

  # Dense block: a ReLU layer whose `units` parameter is tunable.
  dense_block <- function(tensor, units) {
    layer_dense(tensor, units = units, activation = "relu")
  }

  # Addition block: sums two tensors (the residual connection).
  add_block <- function(input_a, input_b) {
    layer_add(list(input_a, input_b))
  }

  # Output block: a single-unit dense layer for regression.
  output_block_reg <- function(tensor) {
    layer_dense(tensor, units = 1)
  }

  # Step 2: Create the spec. The `layer_blocks` list defines the graph:
  # list-element names are node names, and a block's argument names
  # determine which upstream nodes feed into it.
  create_keras_functional_spec(
    model_name = "my_resnet_spec",
    layer_blocks = list(
      main_input = input_block,
      # Taking `main_input` as an argument wires this block to the input node.
      dense_path = function(main_input, units = 32) {
        dense_block(main_input, units)
      },
      # Two arguments: wired to both the original input and the dense layer.
      add_residual = function(main_input, dense_path) {
        add_block(main_input, dense_path)
      },
      # The final block must be named 'output'; it follows the residual add.
      output = function(add_residual) {
        output_block_reg(add_residual)
      }
    ),
    mode = "regression"
  )

  # Step 3: Use the newly generated specification function. The
  # `dense_path_units` argument was created automatically.
  model_spec <- my_resnet_spec(dense_path_units = 64, epochs = 10)

  # Because `dense_path` has a single input, its depth is tunable as well:
  # model_spec <- my_resnet_spec(num_dense_path = 2, dense_path_units = 32)
  print(model_spec)

  remove_keras_spec("my_resnet_spec")
  # tune::tunable(model_spec)
}
# }
# Run the code above in your browser using DataLab