get_pycox_activation {survivalmodels}    R Documentation
Get Pytorch Activation Function
Description
Helper function to return a class or constructed object for a PyTorch activation function from torch.nn.modules.activation.
Usage
get_pycox_activation(
  activation = "relu",
  construct = TRUE,
  alpha = 1,
  dim = NULL,
  lambd = 0.5,
  min_val = -1,
  max_val = 1,
  negative_slope = 0.01,
  num_parameters = 1L,
  init = 0.25,
  lower = 1/8,
  upper = 1/3,
  beta = 1,
  threshold = 20,
  value = 20
)
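As a hedged sketch (not taken from the package's own examples), typical calls might look like the following, assuming reticulate can locate a Python installation with torch available:

if (requireNamespace("reticulate", quietly = TRUE) &&
    reticulate::py_module_available("torch")) {
  library(survivalmodels)
  # constructed activation object, ready to be passed to a network
  act <- get_pycox_activation("relu", construct = TRUE)
  # the un-constructed class instead of an instantiated object
  act_cls <- get_pycox_activation("elu", construct = FALSE)
}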
Arguments
activation        (character(1)) Activation function to use; see Details for implemented methods.
construct         (logical(1)) If TRUE (default) returns a constructed object, otherwise the class.
alpha             (numeric(1)) Passed to "celu" and "elu".
dim               (integer(1)) Passed to "glu", "logsoftmax", "softmax", and "softmin".
lambd             (numeric(1)) Passed to "hardshrink" and "softshrink".
min_val, max_val  (numeric(1)) Passed to "hardtanh".
negative_slope    (numeric(1)) Passed to "leakyrelu".
num_parameters    (integer(1)) Passed to "prelu".
init              (numeric(1)) Passed to "prelu".
lower, upper      (numeric(1)) Passed to "rrelu".
beta              (numeric(1)) Passed to "softplus".
threshold         (numeric(1)) Passed to "softplus" and "threshold".
value             (numeric(1)) Passed to "threshold".
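Each of the numeric arguments above is only consumed by the matching activation and is otherwise ignored. The following sketch (an assumption about typical use, not taken from the package examples, and again assuming torch is available to reticulate) passes activation-specific settings:

# negative_slope is only relevant for "leakyrelu"
lrelu <- get_pycox_activation("leakyrelu", negative_slope = 0.1)
# lower and upper are only relevant for "rrelu"
rrelu <- get_pycox_activation("rrelu", lower = 1/8, upper = 1/3)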
Details
Implemented methods (with help pages) are:

- "celu": reticulate::py_help(torch$nn$modules$activation$CELU)
- "elu": reticulate::py_help(torch$nn$modules$activation$ELU)
- "gelu": reticulate::py_help(torch$nn$modules$activation$GELU)
- "glu": reticulate::py_help(torch$nn$modules$activation$GLU)
- "hardshrink": reticulate::py_help(torch$nn$modules$activation$Hardshrink)
- "hardsigmoid": reticulate::py_help(torch$nn$modules$activation$Hardsigmoid)
- "hardswish": reticulate::py_help(torch$nn$modules$activation$Hardswish)
- "hardtanh": reticulate::py_help(torch$nn$modules$activation$Hardtanh)
- "relu6": reticulate::py_help(torch$nn$modules$activation$ReLU6)
- "leakyrelu": reticulate::py_help(torch$nn$modules$activation$LeakyReLU)
- "logsigmoid": reticulate::py_help(torch$nn$modules$activation$LogSigmoid)
- "logsoftmax": reticulate::py_help(torch$nn$modules$activation$LogSoftmax)
- "prelu": reticulate::py_help(torch$nn$modules$activation$PReLU)
- "rrelu": reticulate::py_help(torch$nn$modules$activation$RReLU)
- "relu": reticulate::py_help(torch$nn$modules$activation$ReLU)
- "selu": reticulate::py_help(torch$nn$modules$activation$SELU)
- "sigmoid": reticulate::py_help(torch$nn$modules$activation$Sigmoid)
- "softmax": reticulate::py_help(torch$nn$modules$activation$Softmax)
- "softmax2d": reticulate::py_help(torch$nn$modules$activation$Softmax2d)
- "softmin": reticulate::py_help(torch$nn$modules$activation$Softmin)
- "softplus": reticulate::py_help(torch$nn$modules$activation$Softplus)
- "softshrink": reticulate::py_help(torch$nn$modules$activation$Softshrink)
- "softsign": reticulate::py_help(torch$nn$modules$activation$Softsign)
- "tanh": reticulate::py_help(torch$nn$modules$activation$Tanh)
- "tanhshrink": reticulate::py_help(torch$nn$modules$activation$Tanhshrink)
- "threshold": reticulate::py_help(torch$nn$modules$activation$Threshold)
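To open one of these Python help pages interactively, the torch module can be imported through reticulate first; a minimal sketch, assuming torch is installed in the active Python environment:

torch <- reticulate::import("torch")
# opens the Python docstring for LeakyReLU in the console pager
reticulate::py_help(torch$nn$modules$activation$LeakyReLU)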
Value
A constructed Python object (if construct = TRUE) or a class from torch.nn.modules.activation.