Applies an activation function to an output.
Activation(activation, input_shape = NULL)
name of activation function to use. See Details for possible options.
only needed when this is the first layer of a model; sets the input shape of the data
Taylor B. Arnold, taylor.arnold@acm.org
Possible activations include 'softmax', 'elu', 'softplus', 'softsign', 'relu', 'tanh', 'sigmoid', 'hard_sigmoid', 'linear'. You may also set this equal to any of the outputs from an AdvancedActivation.
Chollet, Francois. 2015. Keras: Deep Learning library for Theano and TensorFlow.
Other layers: ActivityRegularization, AdvancedActivation, BatchNormalization, Conv, Dense, Dropout, Embedding, Flatten, GaussianNoise, LayerWrapper, LocallyConnected, Masking, MaxPooling, Permute, RNN, RepeatVector, Reshape, Sequential
if (keras_available()) {
  # Simulate a small training set: 100 observations with 10 features,
  # and three-class labels one-hot encoded for a softmax output.
  X_train <- matrix(rnorm(100 * 10), nrow = 100)
  Y_train <- to_categorical(matrix(sample(0:2, 100, TRUE), ncol = 1), 3)

  # Build the model one layer at a time with $add(). Only the first
  # layer needs input_shape; ncol(X_train) gives the feature count.
  mod <- Sequential()
  mod$add(Dense(units = 50, input_shape = ncol(X_train)))
  mod$add(Dropout(rate = 0.5))
  mod$add(Activation("relu"))
  mod$add(Dense(units = 3))
  mod$add(ActivityRegularization(l1 = 1))
  mod$add(Activation("softmax"))

  keras_compile(mod, loss = 'categorical_crossentropy', optimizer = RMSprop())
  keras_fit(mod, X_train, Y_train, batch_size = 32, epochs = 5,
            verbose = 0, validation_split = 0.2)

  # You can also add layers directly as arguments to Sequential()
  mod <- Sequential(
    Dense(units = 50, input_shape = ncol(X_train)),
    Dropout(rate = 0.5),
    Activation("relu"),
    Dense(units = 3),
    ActivityRegularization(l1 = 1),
    Activation("softmax")
  )
  keras_compile(mod, loss = 'categorical_crossentropy', optimizer = RMSprop())
  keras_fit(mod, X_train, Y_train, batch_size = 32, epochs = 5,
            verbose = 0, validation_split = 0.2)
}
if (keras_available()) {
  # 100 samples of 10 predictors each; responses are one of three
  # classes, one-hot encoded for use with a softmax output layer.
  features <- matrix(rnorm(100 * 10), nrow = 100)
  labels <- to_categorical(matrix(sample(0:2, 100, TRUE), ncol = 1), 3)

  # Incremental construction: start from an empty Sequential model
  # and push layers onto it with $add().
  model <- Sequential()
  model$add(Dense(units = 50, input_shape = dim(features)[2]))
  model$add(Dropout(rate = 0.5))
  model$add(Activation("relu"))
  model$add(Dense(units = 3))
  model$add(ActivityRegularization(l1 = 1))
  model$add(Activation("softmax"))
  keras_compile(model, loss = 'categorical_crossentropy', optimizer = RMSprop())
  keras_fit(model, features, labels, batch_size = 32, epochs = 5,
            verbose = 0, validation_split = 0.2)

  # You can also add layers directly as arguments to Sequential()
  model <- Sequential(
    Dense(units = 50, input_shape = ncol(features)),
    Dropout(rate = 0.5),
    Activation("relu"),
    Dense(units = 3),
    ActivityRegularization(l1 = 1),
    Activation("softmax")
  )
  keras_compile(model, loss = 'categorical_crossentropy', optimizer = RMSprop())
  keras_fit(model, features, labels, batch_size = 32, epochs = 5,
            verbose = 0, validation_split = 0.2)
}
Run the code above in your browser using DataLab