#' @title quantile_fit
#'
#' @description Fit a quantile regression model for a single quantile: a cold start fit with rq() on the first N1 observations, followed by recursive updating for each horizon in model$kseq.
#' @param prm Vector of parameters to insert into the model input expressions (NA keeps the current parameters).
#' @param model The model object to be fitted.
#' @param q The quantile (tau) to fit.
#' @param data The data.list holding the data to fit on.
#' @param printout Logical: print the parameters and the scores?
#' @param returnanalysis Logical: if FALSE only the summed loss is returned (e.g. for use by an optimizer).
#' @param multistep Not used in the current implementation.
#' @export
#' @examples
#' \dontrun{quantile_fit(prm = NA, model = model, q = 0.5, data = D)}
quantile_fit <- function(prm, model, q, data, printout = TRUE, returnanalysis = TRUE, multistep = NA){
if(printout){
message("----------------")
if(is.na(prm[1])){
message("prm=NA, so current parameters are used.")
}else{
print_to_message(prm)
}
}
# First insert the prm into the model input expressions
model$insert_prm(prm)
## Compute the effective memory of lambda, the window size
model$n_in_bin <- round(2*(1/(1-model$regprm$lambda)))
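## e.g. lambda = 0.99 gives n_in_bin = round(2/(1 - 0.99)) = 200 observations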
## If the effective memory is less than the cold start window, then set it to just past the cold start window.
## HGB 11/02/2025: Unsure if this is correct
if(model$n_in_bin < model$N1){
model$n_in_bin <- model$N1 + 1
}
## Transform the data
# Since the fit is run from scratch, initialize the stored input data (only needed when running iteratively)
#model$datatr <- NA
#model$yAR <- NA
# Reset the model state (e.g. inputs state, stored iterative data, ...)
model$reset_state()
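## If this quantile was already fitted, clear the stored fit information and coefficients before refitting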
if(q %in% as.numeric(sub(".*q", "", names(model$info)))){
model$info <- list()
model$beta <- list()
}
# Generate the 2nd stage inputs (i.e. the transformed data)
model$datatr <- model$transform_data(data)
## Fit the cold start
#X <- data.frame(matrix(unlist(model$datatr), ncol = length(model$datatr), byrow = F))
#colnames(X) <- names(model$datatr)
Ypred <- NULL
for(k in model$kseq){
# cat("horizon :", k, "\n")
# cat("n_in_bin :", model$n_in_bin, "\n")
## Setup the design matrix for k step
X <- as.data.frame(subset(model$datatr, kseq=k))
## Lag them k steps to match the output, so each input forecast is aligned with the observation it predicts.
X <- onlineforecast:::lagdf.matrix(X, k)
X$y <- data[[model$output]]
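## Cold start fit on the first N1-k points with rq() from quantreg; the formula is built from the
## input names, e.g. for (illustrative) inputs named "Ta" and "mu" it becomes "y ~ -1 + Ta + mu"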
fit <- rq(paste("y ~ -1 + ", paste(names(X[, -which(names(X) %in% c("y"))]), collapse = " + "), sep = ""), tau = q, data = X[1:(model$N1-k),])
## Fill it from start (index 1) to the end of the cold start
beta <- fit$coefficients
model$beta[[paste0("q", q)]][[paste0("k", k)]] <- matrix(beta, nrow = model$N1, ncol = model$K, byrow = T)
r <- matrix(fit$residuals, ncol = length(q)) # one column of residuals per quantile
for(i in 1:dim(r)[2]){
## Residuals that are numerically zero lie on the active constraints of the rq solution; set them exactly to zero
IdxNull <- which(abs(r[,i]) < 10^-2)
r[IdxNull, i] <- 0
}
### Initialization of algorithm
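## The weights give exponential forgetting over the cold start residuals: the oldest point gets
## weight lambda^n and the most recent lambda^1, with n = length(r)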
model$info[[paste0("q", q)]][[paste0("k", k)]] <- rq_init_cpp(X = as.matrix(X[1:(model$N1-k),]),
R = r,
beta = beta,
n = model$N1-k,
weights = model$regprm$lambda^rev(seq(1,length(r),1)))
## The quantile estimation
Ypredres <- update_rq(as.matrix(X[(model$N1+1 - k):nrow(X),]), q, k, model = model)
### Get the results, Ypred, loss
Ypred <- cbind(Ypred, Ypredres)
}
## Compute the loss function over the score period (maybe the residuals are computed in a different place)
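## The pinball (check) loss for a residual r at quantile q is rho_q(r) = r * (q - I(r < 0))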
loss <- sapply(model$kseq, function(i) pinball_loss(r = model$info[[paste0("q", q)]][[paste0("k", i)]]$R[data$scoreperiod[(i+1):length(data$scoreperiod)]], q))
model$Ypred <- Ypred
if(!returnanalysis){
#Yhat <- quantile_prediction(model = model, multistep = model$kseq)
cat("Quantile, tau:", q, "\n")
cat("the loss: ", loss, "\n")
val <- sum(loss)
if(printout){
print_to_message(c(loss,sum=val))
}
return(val)
} else{
val <- sum(loss)
if(printout){
print_to_message(c(loss,sum=val))
}
## Get predictions
#Yhat <- model$getPred(multistep = multistep)
#Yhat <- quantile_prediction(model = model, multistep = multistep)
#invisible(structure(list(Yhat = Yhat, model = model, data = data, scoreval = loss), class = c("quantile_fit")))
}
}
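
## A minimal usage sketch (not run), assuming a model object with the fields used above
## (kseq, N1, regprm$lambda, output, insert_prm(), reset_state(), transform_data()), a data.list
## D with a scoreperiod field, and that rq_init_cpp(), update_rq() and pinball_loss() are
## provided by this package:
# model$kseq <- 1:6
# quantile_fit(prm = NA, model = model, q = 0.05, data = D)
# quantile_fit(prm = NA, model = model, q = 0.95, data = D)
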
print_to_message <- function(...) {
message(paste(utils::capture.output(print(...)), collapse="\n"))
}