struggling to catch the rbind error in fn_cross_validation_across_populations_pairwise()
jeffersonfparil committed Dec 20, 2024
1 parent 73f63bb commit fb4f7df
Showing 1 changed file with 81 additions and 20 deletions.
101 changes: 81 additions & 20 deletions R/models.R
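The pattern applied in every hunk below is the same: wrap the model fit in tryCatch() and, on failure, return a gpError S4 object (code 407) instead of the old list of NAs. For methods::new("gpError", ...) to succeed, the package must define that class elsewhere; the definition is not part of this diff, so the following is only a minimal sketch of what it would need to look like, with the slot names and types assumed from the calls in the hunks below:

### Assumed class definition (not shown in this diff): an S4 class with the two slots used by methods::new() below
methods::setClass("gpError", slots=c(code="numeric", message="character"))
### Example of the object the model functions now return on a failed fit
err = methods::new("gpError", code=407, message="Error in models::fn_ridge(...). Failed to fit the model.")
methods::is(err, "gpError") ### TRUE, so callers can test for a failed fit before using the result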
@@ -291,11 +291,17 @@ fn_ridge = function(list_merged, vec_idx_training, vec_idx_validation, other_par
glmnet::cv.glmnet(x=X_training, y=y_training, alpha=0, nfolds=other_params$n_folds, parallel=FALSE),
error = function(e) {NA})
if (is.na(sol[1])) {
return(list(
list_perf=NA,
df_y_validation=NA,
vec_effects=NA,
n_non_zero=NA))
# return(list(
# list_perf=NA,
# df_y_validation=NA,
# vec_effects=NA,
# n_non_zero=NA))
error = methods::new("gpError",
code=407,
message=paste0(
"Error in models::fn_ridge(...). ",
"Failed to fit the model."))
return(error)
}
### Find the first lambda with the lowest squared error (deviance) while having non-zero SNP effects
vec_idx_decreasing_deviance = order(sol$glmnet.fit$dev.ratio, decreasing=FALSE)
@@ -444,11 +450,17 @@ fn_lasso = function(list_merged, vec_idx_training, vec_idx_validation, other_par
glmnet::cv.glmnet(x=X_training, y=y_training, alpha=1, nfolds=other_params$n_folds, parallel=FALSE),
error = function(e) {NA})
if (is.na(sol[1])) {
return(list(
list_perf=NA,
df_y_validation=NA,
vec_effects=NA,
n_non_zero=NA))
# return(list(
# list_perf=NA,
# df_y_validation=NA,
# vec_effects=NA,
# n_non_zero=NA))
error = methods::new("gpError",
code=407,
message=paste0(
"Error in models::fn_lasso(...). ",
"Failed to fit the model."))
return(error)
}
### Find the first lambda with the lowest squared error (deviance) while having non-zero SNP effects
vec_idx_decreasing_deviance = order(sol$glmnet.fit$dev.ratio, decreasing=FALSE)
@@ -597,11 +609,17 @@ fn_elastic_net = function(list_merged, vec_idx_training, vec_idx_validation, oth
glmnet::cv.glmnet(x=X_training, y=y_training),
error = function(e) {NA})
if (is.na(sol[1])) {
return(list(
list_perf=NA,
df_y_validation=NA,
vec_effects=NA,
n_non_zero=NA))
# return(list(
# list_perf=NA,
# df_y_validation=NA,
# vec_effects=NA,
# n_non_zero=NA))
error = methods::new("gpError",
code=407,
message=paste0(
"Error in models::fn_elastic_net(...). ",
"Failed to fit the model."))
return(error)
}
### Find the first lambda with the lowest squared error (deviance) while having non-zero SNP effects
vec_idx_decreasing_deviance = order(sol$glmnet.fit$dev.ratio, decreasing=FALSE)
@@ -754,7 +772,17 @@ fn_Bayes_A = function(list_merged, vec_idx_training, vec_idx_validation,
### Attempt at preventing overwrites to the running Gibbs samplers in parallel
other_params$out_prefix = gsub(":", ".", gsub(" ", "-", paste(other_params$out_prefix, Sys.time(), stats::runif(1), sep="-")))
### Solve via Bayes A (a priori assume heritability at 50%, i.e. R2=0.5 below)
sol = BGLR::BGLR(y=yNA, ETA=ETA, R2=0.5, nIter=other_params$nIter, burnIn=other_params$burnIn, saveAt=other_params$out_prefix, verbose=FALSE)
sol = tryCatch(
BGLR::BGLR(y=yNA, ETA=ETA, R2=0.5, nIter=other_params$nIter, burnIn=other_params$burnIn, saveAt=other_params$out_prefix, verbose=FALSE),
error = function(e) {NA})
if (is.na(sol[1])) {
error = methods::new("gpError",
code=407,
message=paste0(
"Error in models::fn_Bayes_A(...). ",
"Failed to fit the model."))
return(error)
}
### Extract effects including the intercept and fixed effects
if (!is.null(list_merged$COVAR)) {
b_hat = c(sol$mu, sol$ETA[[2]]$b, sol$ETA$MRK$b)
@@ -901,7 +929,17 @@ fn_Bayes_B = function(list_merged, vec_idx_training, vec_idx_validation,
### Attempt at preventing overwrites to the running Gibbs samplers in parallel
other_params$out_prefix = gsub(":", ".", gsub(" ", "-", paste(other_params$out_prefix, Sys.time(), stats::runif(1), sep="-")))
### Solve via Bayes B (a priori assume heritability at 50%, i.e. R2=0.5 below)
sol = BGLR::BGLR(y=yNA, ETA=ETA, R2=0.5, nIter=other_params$nIter, burnIn=other_params$burnIn, saveAt=other_params$out_prefix, verbose=FALSE)
sol = tryCatch(
BGLR::BGLR(y=yNA, ETA=ETA, R2=0.5, nIter=other_params$nIter, burnIn=other_params$burnIn, saveAt=other_params$out_prefix, verbose=FALSE),
error = function(e) {NA})
if (is.na(sol[1])) {
error = methods::new("gpError",
code=407,
message=paste0(
"Error in models::fn_Bayes_B(...). ",
"Failed to fit the model."))
return(error)
}
### Extract effects including the intercept and fixed effects
if (!is.null(list_merged$COVAR)) {
b_hat = c(sol$mu, sol$ETA[[2]]$b, sol$ETA$MRK$b)
@@ -1048,7 +1086,17 @@ fn_Bayes_C = function(list_merged, vec_idx_training, vec_idx_validation,
### Attempt at preventing overwrites to the running Gibbs samplers in parallel
other_params$out_prefix = gsub(":", ".", gsub(" ", "-", paste(other_params$out_prefix, Sys.time(), stats::runif(1), sep="-")))
### Solve via Bayes C (a priori assume heritability at 50%, i.e. R2=0.5 below)
sol = BGLR::BGLR(y=yNA, ETA=ETA, R2=0.5, nIter=other_params$nIter, burnIn=other_params$burnIn, saveAt=other_params$out_prefix, verbose=FALSE)
sol = tryCatch(
BGLR::BGLR(y=yNA, ETA=ETA, R2=0.5, nIter=other_params$nIter, burnIn=other_params$burnIn, saveAt=other_params$out_prefix, verbose=FALSE),
error = function(e) {NA})
if (is.na(sol[1])) {
error = methods::new("gpError",
code=407,
message=paste0(
"Error in models::fn_Bayes_C(...). ",
"Failed to fit the model."))
return(error)
}
### Extract effects including the intercept and fixed effects
if (!is.null(list_merged$COVAR)) {
b_hat = c(sol$mu, sol$ETA[[2]]$b, sol$ETA$MRK$b)
@@ -1196,9 +1244,22 @@ fn_gBLUP = function(list_merged, vec_idx_training, vec_idx_validation, other_par
eval(parse(text=paste0("df_training$covariate_", j, " = X[, j]")))
}
covariates_string = paste(paste0("covariate_", 1:ncol(X)), collapse="+")
eval(parse(text=paste0("mod = sommer::mmer(y ~ 1 + ", covariates_string, ", random= ~vsr(id, Gu=A ), rcov= ~vsr(units), data=df_training, dateWarning=FALSE, verbose=FALSE)")))
mod = tryCatch(
eval(parse(text=paste0("sommer::mmer(y ~ 1 + ", covariates_string, ", random= ~vsr(id, Gu=A ), rcov= ~vsr(units), data=df_training, dateWarning=FALSE, verbose=FALSE)"))),
error=function(e){NA})
} else {
mod = sommer::mmer(y ~ 1, random= ~vsr(id, Gu=A), rcov= ~vsr(units), data=df_training, dateWarning=FALSE, verbose=FALSE)
mod = tryCatch(
sommer::mmer(y ~ 1, random= ~vsr(id, Gu=A), rcov= ~vsr(units), data=df_training, dateWarning=FALSE, verbose=FALSE),
error=function(e){NA})
}
### Error catching
if (is.na(mod[1])) {
error = methods::new("gpError",
code=407,
message=paste0(
"Error in models::fn_gBLUP(...). ",
"Failed to fit the model."))
return(error)
}
### Extract effects
b_hat = mod$Beta$Estimate; names(b_hat) = mod$Beta$Effect
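The commit message points at the downstream symptom: fn_cross_validation_across_populations_pairwise() rbinds the per-fold outputs of these model functions, and rbind-ing a gpError (or the old list of NAs) is what breaks. That caller is not shown in this diff, so the guard below is only a hypothetical sketch of how such results could be skipped before binding; list_fold_outputs, df_y_validation, and the toy data are illustrative names, not the package's actual code:

### Hypothetical caller-side guard (not in this diff): drop failed fits before rbind-ing per-fold outputs
methods::setClass("gpError", slots=c(code="numeric", message="character")) ### assumed class, as sketched above
ok = list(df_y_validation=data.frame(id=c("entry_01", "entry_02"), y_true=c(1.2, 0.8), y_pred=c(1.1, 0.9)))
bad = methods::new("gpError", code=407, message="Error in models::fn_ridge(...). Failed to fit the model.")
list_fold_outputs = list(ok, bad)
df_y_validation = NULL
for (out in list_fold_outputs) {
    if (methods::is(out, "gpError")) {
        next ### failed fits carry no validation data frame to bind
    }
    df_y_validation = rbind(df_y_validation, out$df_y_validation)
}
print(df_y_validation) ### only the successful fold remains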
