This learner supports exponential smoothing models using ets from the forecast package.
Format: An R6Class object.

Value: A learner object with methods for training and prediction. See Lrnr_base for documentation on learners.
model="ZZZ"
: Three-character string identifying method. In all
cases, "N"=none, "A"=additive, "M"=multiplicative, and "Z"=automatically
selected. The first letter denotes the error type, second letter denotes
the trend type, third letter denotes the season type. For example, "ANN"
is simple exponential smoothing with additive errors, "MAM" is
multiplicative Holt-Winters' methods with multiplicative errors, etc.
damped=NULL
: If TRUE, use a damped trend (either additive or
multiplicative). If NULL, both damped and non-damped trends will be tried
and the best model (according to the information criterion ic) returned.
alpha=NULL
: Value of alpha. If NULL, it is estimated.
beta=NULL
: Value of beta. If NULL, it is estimated.
gamma=NULL
: Value of gamma. If NULL, it is estimated.
phi=NULL
: Value of phi. If NULL, it is estimated.
lambda=NULL
: Box-Cox transformation parameter. Ignored if
NULL
. When lambda is specified, additive.only
is set to
TRUE
.
additive.only=FALSE
: If TRUE
, will only consider
additive models.
biasadj=FALSE
: Use adjusted back-transformed mean for Box-Cox
transformations.
lower=c(rep(1e-04, 3), 0.8)
: Lower bounds for the parameters
(alpha, beta, gamma, phi).
upper=c(rep(0.9999,3), 0.98)
: Upper bounds for the parameters
(alpha, beta, gamma, phi)
opt.crit="lik"
: Optimization criterion.
nmse=3
: Number of steps for average multistep MSE (1 <= nmse
<= 30).
bounds="both"
" Type of parameter space to impose: "usual"
indicates all parameters must lie between specified lower and upper
bounds; "admissible" indicates parameters must lie in the admissible
space; "both" (default) takes the intersection of these regions.
ic="aic"
: Information criterion to be used in model
selection.
restrict=TRUE
: If TRUE, models with infinite variance will not
be allowed.
allow.multiplicative.trend=FALSE
: If TRUE, models with
multiplicative trend are allowed when searching for a model.
use.initial.values=FALSE
: If TRUE
and model is of class
"ets", then the initial values in the model are also not re-estimated.
n.ahead
: The forecast horizon. If not specified, returns
forecast of size task$X
.
freq=1
: the number of observations per unit of time.
...
: Other parameters passed to ets.
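As a brief illustration of passing the arguments above to the learner's constructor, the following sketch builds a non-default exponential smoothing learner. The particular choices (an "ANN" model, a frequency of 7, a 14-step horizon) are illustrative assumptions, not recommended settings.

library(sl3)

# Illustrative (assumed) settings: additive-error simple exponential
# smoothing, 7 observations per period, forecasts 14 steps ahead.
ses_lrnr <- make_learner(
  Lrnr_expSmooth,
  model = "ANN",
  freq = 7,
  n.ahead = 14
)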
Other Learners: Custom_chain, Lrnr_HarmonicReg, Lrnr_arima, Lrnr_bartMachine, Lrnr_base, Lrnr_bayesglm, Lrnr_bilstm, Lrnr_caret, Lrnr_cv_selector, Lrnr_cv, Lrnr_dbarts, Lrnr_define_interactions, Lrnr_density_discretize, Lrnr_density_hse, Lrnr_density_semiparametric, Lrnr_earth, Lrnr_gam, Lrnr_ga, Lrnr_gbm, Lrnr_glm_fast, Lrnr_glm_semiparametric, Lrnr_glmnet, Lrnr_glmtree, Lrnr_glm, Lrnr_grfcate, Lrnr_grf, Lrnr_gru_keras, Lrnr_gts, Lrnr_h2o_grid, Lrnr_hal9001, Lrnr_haldensify, Lrnr_hts, Lrnr_independent_binomial, Lrnr_lightgbm, Lrnr_lstm_keras, Lrnr_mean, Lrnr_multiple_ts, Lrnr_multivariate, Lrnr_nnet, Lrnr_nnls, Lrnr_optim, Lrnr_pca, Lrnr_pkg_SuperLearner, Lrnr_polspline, Lrnr_pooled_hazards, Lrnr_randomForest, Lrnr_ranger, Lrnr_revere_task, Lrnr_rpart, Lrnr_rugarch, Lrnr_screener_augment, Lrnr_screener_coefs, Lrnr_screener_correlation, Lrnr_screener_importance, Lrnr_sl, Lrnr_solnp_density, Lrnr_solnp, Lrnr_stratified, Lrnr_subset_covariates, Lrnr_svm, Lrnr_tsDyn, Lrnr_ts_weights, Lrnr_xgboost, Pipeline, Stack, define_h2o_X(), undocumented_learner
Examples:

library(sl3)
library(origami)

# load the bicycle-sharing time series data included with sl3
data(bsds)

# rolling-window cross-validation folds suitable for time series
folds <- make_folds(bsds,
  fold_fun = folds_rolling_window, window_size = 500,
  validation_size = 100, gap = 0, batch = 50
)

# define the prediction task with the time-series folds
task <- sl3_Task$new(
  data = bsds,
  folds = folds,
  covariates = c("weekday", "temp"),
  outcome = "cnt"
)

# instantiate the exponential smoothing learner with default settings
expSmooth_lrnr <- make_learner(Lrnr_expSmooth)

# split the task into training and validation tasks using the first fold
train_task <- training(task, fold = task$folds[[1]])
valid_task <- validation(task, fold = task$folds[[1]])

# fit on the training window and forecast over the validation window
expSmooth_fit <- expSmooth_lrnr$train(train_task)
expSmooth_preds <- expSmooth_fit$predict(valid_task)
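As a possible follow-up not shown in the original example, the validation-window forecasts can be scored against the observed outcome; the mean squared error below is an illustrative sketch that assumes the objects created above.

# Illustrative follow-up (assumption): score the forecasts on the
# validation fold using mean squared error.
valid_mse <- mean((valid_task$Y - expSmooth_preds)^2)
valid_mse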