From b3fe1993bf7610f3c359274dcbff04811c3bfeb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Berthet?= Date: Tue, 3 Sep 2024 16:18:26 +0200 Subject: [PATCH] Tomaster (#188) * use dev version of antaresRead on master * update workflow ci/cd on master only --- DESCRIPTION | 4 +- NAMESPACE | 333 +++++++------ NEWS.md | 41 +- R/antaresRead-reexports.R | 20 - R/computeTimeStampFromHourly.R | 3 +- R/createBindingConstraint.R | 386 +++++++++------ R/createClusterST.R | 210 +++++---- R/createStudy.R | 2 +- R/editBindingConstraint.R | 26 +- R/editClusterST.R | 46 +- R/editLink.R | 28 +- R/importStudyAPI.R | 24 +- R/removeArea.R | 5 +- R/removeCluster.R | 5 +- R/removeLink.R | 3 +- R/updateGeneralSettings.R | 107 +++-- R/updateOptimizationSettings.R | 18 +- R/updateOutputSettings.R | 83 ++-- R/utils.R | 22 +- _pkgdown.yml | 2 + man/antaresRead-reexports.Rd | 19 - man/create-study.Rd | 2 +- man/createBindingConstraint.Rd | 51 +- man/createBindingConstraintBulk.Rd | 98 ++++ man/createClusterST.Rd | 54 ++- man/dicoOutputSettings.Rd | 20 + man/dot-format_ini_rhs.Rd | 17 + man/editBindingConstraint.Rd | 1 + ...es_check.Rd => group_values_meta_check.Rd} | 8 +- man/importZipStudyWeb.Rd | 13 +- man/removeBindingConstraint.Rd | 1 + man/storage_values_default.Rd | 12 +- man/updateGeneralSettings.Rd | 49 +- man/updateOptimizationSettings.Rd | 3 + man/updateOutputSettings.Rd | 11 +- tests/testthat/test-createArea.R | 36 +- tests/testthat/test-createBindingConstraint.R | 382 ++++++++++++++- tests/testthat/test-createCluster.R | 20 +- tests/testthat/test-createClusterST.R | 253 ++-------- tests/testthat/test-createLink.R | 11 +- tests/testthat/test-editBindingConstraint.R | 87 +++- tests/testthat/test-editClusterST.R | 40 +- tests/testthat/test-editLink.R | 54 ++- tests/testthat/test-updateGeneralSettings.R | 53 ++- .../test-updateOptimizationSettings.R | 19 + tests/testthat/test-updateOutputSettings.R | 31 +- tests/testthat/test-utils.R | 48 ++ tests/testthat/test-writeInputTS.R | 443 +++++++++--------- vignettes/api-variant-management.Rmd | 2 +- 49 files changed, 1998 insertions(+), 1208 deletions(-) delete mode 100644 R/antaresRead-reexports.R delete mode 100644 man/antaresRead-reexports.Rd create mode 100644 man/createBindingConstraintBulk.Rd create mode 100644 man/dicoOutputSettings.Rd create mode 100644 man/dot-format_ini_rhs.Rd rename man/{group_values_check.Rd => group_values_meta_check.Rd} (79%) create mode 100644 tests/testthat/test-utils.R diff --git a/DESCRIPTION b/DESCRIPTION index 7829746d..c731bda2 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,7 +1,7 @@ Package: antaresEditObject Type: Package Title: Edit an 'Antares' Simulation -Version: 0.7.0 +Version: 0.7.1.9000 Authors@R: c( person("Tatiana", "Vargas", email = "tatiana.vargas@rte-france.com", role = c("aut", "cre")), person("Frederic", "Breant", role = "aut"), @@ -53,3 +53,5 @@ Suggests: knitr, rmarkdown VignetteBuilder: knitr +Remotes: + github::rte-antares-rpackage/antaresRead diff --git a/NAMESPACE b/NAMESPACE index a0d8efa9..340a2546 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -1,167 +1,166 @@ -# Generated by roxygen2: do not edit by hand - -S3method(print,antares.api.command) -S3method(print,antares.api.commands) -S3method(print,antares.api.logs) -export(activateRES) -export(activateST) -export(adequacyOptions) -export(backupStudy) -export(checkRemovedArea) -export(cleanUpOutput) -export(clearScenarioBuilder) -export(computeOtherFromHourlyMulti) -export(computeOtherFromHourlyYear) -export(computeTimeStampFromHourly) 
-export(convertConfigToAdq) -export(copyOutput) -export(copyStudyWeb) -export(createArea) -export(createBindingConstraint) -export(createBindingConstraintBulk) -export(createCluster) -export(createClusterBulk) -export(createClusterRES) -export(createClusterST) -export(createDSR) -export(createDistrict) -export(createLink) -export(createPSP) -export(createStudy) -export(createStudyAPI) -export(createVariant) -export(create_scb_referential_series_type) -export(deduplicateScenarioBuilder) -export(deleteStudy) -export(dicoGeneralSettings) -export(dicoOptimizationSettings) -export(editArea) -export(editBindingConstraint) -export(editCluster) -export(editClusterRES) -export(editClusterST) -export(editDSR) -export(editLink) -export(editPSP) -export(filteringOptions) -export(getCapacityDSR) -export(getCapacityPSP) -export(getJobLogs) -export(getJobs) -export(getPlaylist) -export(getVariantCommands) -export(group_values_check) -export(importZipStudyWeb) -export(is_antares_v7) -export(is_antares_v820) -export(list_pollutants_values) -export(mockSimulationAPI) -export(nodalOptimizationOptions) -export(propertiesLinkOptions) -export(readIni) -export(readIniAPI) -export(readIniFile) -export(readScenarioBuilder) -export(removeArea) -export(removeBindingConstraint) -export(removeCluster) -export(removeClusterRES) -export(removeClusterST) -export(removeLink) -export(runSimulation) -export(runTsGenerator) -export(scenarioBuilder) -export(searchStudy) -export(setAPImode) -export(setPlaylist) -export(setSolverPath) -export(storage_values_default) -export(updateAdequacySettings) -export(updateGeneralSettings) -export(updateInputSettings) -export(updateOptimizationSettings) -export(updateOutputSettings) -export(updateScenarioBuilder) -export(useVariant) -export(writeEconomicOptions) -export(writeHydroValues) -export(writeIni) -export(writeIniAPI) -export(writeIniFile) -export(writeIniHydro) -export(writeInputTS) -export(writeMiscGen) -export(writeOutputValues) -export(writeSeriesPrepro) -export(writeVariantCommands) -export(writeWaterValues) -import(data.table) -import(doParallel) -import(parallel) -import(pbapply) -import(progressr) -importFrom(antaresRead,api_delete) -importFrom(antaresRead,api_get) -importFrom(antaresRead,api_post) -importFrom(antaresRead,api_put) -importFrom(antaresRead,getAreas) -importFrom(antaresRead,getLinks) -importFrom(antaresRead,readBindingConstraints) -importFrom(antaresRead,readClusterDesc) -importFrom(antaresRead,readClusterSTDesc) -importFrom(antaresRead,readIni) -importFrom(antaresRead,readIniAPI) -importFrom(antaresRead,readIniFile) -importFrom(antaresRead,readInputTS) -importFrom(antaresRead,readLayout) -importFrom(antaresRead,setSimulationPath) -importFrom(antaresRead,setSimulationPathAPI) -importFrom(antaresRead,simOptions) -importFrom(assertthat,assert_that) -importFrom(data.table,":=") -importFrom(data.table,.SD) -importFrom(data.table,CJ) -importFrom(data.table,as.data.table) -importFrom(data.table,data.table) -importFrom(data.table,dcast) -importFrom(data.table,fread) -importFrom(data.table,fwrite) -importFrom(data.table,melt) -importFrom(data.table,month) -importFrom(data.table,rbindlist) -importFrom(data.table,setcolorder) -importFrom(data.table,setnames) -importFrom(data.table,transpose) -importFrom(data.table,yday) -importFrom(data.table,year) -importFrom(doFuture,registerDoFuture) -importFrom(future,plan) -importFrom(grDevices,col2rgb) -importFrom(httr,POST) -importFrom(httr,accept_json) -importFrom(httr,add_headers) -importFrom(httr,content) 
-importFrom(httr,content_type_json) -importFrom(httr,status_code) -importFrom(httr,stop_for_status) -importFrom(httr,upload_file) -importFrom(jsonlite,toJSON) -importFrom(jsonlite,write_json) -importFrom(lifecycle,deprecated) -importFrom(memuse,Sys.meminfo) -importFrom(plyr,ldply) -importFrom(plyr,llply) -importFrom(stats,as.formula) -importFrom(stats,sd) -importFrom(stats,setNames) -importFrom(utils,getFromNamespace) -importFrom(utils,head) -importFrom(utils,modifyList) -importFrom(utils,packageVersion) -importFrom(utils,read.table) -importFrom(utils,tail) -importFrom(utils,tar) -importFrom(utils,unzip) -importFrom(utils,write.table) -importFrom(whisker,whisker.render) -importFrom(yaml,read_yaml) +# Generated by roxygen2: do not edit by hand + +S3method(print,antares.api.command) +S3method(print,antares.api.commands) +S3method(print,antares.api.logs) +export(activateRES) +export(activateST) +export(adequacyOptions) +export(backupStudy) +export(checkRemovedArea) +export(cleanUpOutput) +export(clearScenarioBuilder) +export(computeOtherFromHourlyMulti) +export(computeOtherFromHourlyYear) +export(computeTimeStampFromHourly) +export(convertConfigToAdq) +export(copyOutput) +export(copyStudyWeb) +export(createArea) +export(createBindingConstraint) +export(createBindingConstraintBulk) +export(createCluster) +export(createClusterBulk) +export(createClusterRES) +export(createClusterST) +export(createDSR) +export(createDistrict) +export(createLink) +export(createPSP) +export(createStudy) +export(createStudyAPI) +export(createVariant) +export(create_scb_referential_series_type) +export(deduplicateScenarioBuilder) +export(deleteStudy) +export(dicoGeneralSettings) +export(dicoOptimizationSettings) +export(dicoOutputSettings) +export(editArea) +export(editBindingConstraint) +export(editCluster) +export(editClusterRES) +export(editClusterST) +export(editDSR) +export(editLink) +export(editPSP) +export(filteringOptions) +export(getCapacityDSR) +export(getCapacityPSP) +export(getJobLogs) +export(getJobs) +export(getPlaylist) +export(getVariantCommands) +export(group_values_meta_check) +export(importZipStudyWeb) +export(is_antares_v7) +export(is_antares_v820) +export(list_pollutants_values) +export(mockSimulationAPI) +export(nodalOptimizationOptions) +export(propertiesLinkOptions) +export(readScenarioBuilder) +export(removeArea) +export(removeBindingConstraint) +export(removeCluster) +export(removeClusterRES) +export(removeClusterST) +export(removeLink) +export(runSimulation) +export(runTsGenerator) +export(scenarioBuilder) +export(searchStudy) +export(setAPImode) +export(setPlaylist) +export(setSolverPath) +export(storage_values_default) +export(updateAdequacySettings) +export(updateGeneralSettings) +export(updateInputSettings) +export(updateOptimizationSettings) +export(updateOutputSettings) +export(updateScenarioBuilder) +export(useVariant) +export(writeEconomicOptions) +export(writeHydroValues) +export(writeIni) +export(writeIniAPI) +export(writeIniFile) +export(writeIniHydro) +export(writeInputTS) +export(writeMiscGen) +export(writeOutputValues) +export(writeSeriesPrepro) +export(writeVariantCommands) +export(writeWaterValues) +import(data.table) +import(doParallel) +import(parallel) +import(pbapply) +import(progressr) +importFrom(antaresRead,api_delete) +importFrom(antaresRead,api_get) +importFrom(antaresRead,api_post) +importFrom(antaresRead,api_put) +importFrom(antaresRead,getAreas) +importFrom(antaresRead,getLinks) +importFrom(antaresRead,readBindingConstraints) 
+importFrom(antaresRead,readClusterDesc) +importFrom(antaresRead,readClusterSTDesc) +importFrom(antaresRead,readIni) +importFrom(antaresRead,readIniFile) +importFrom(antaresRead,readInputTS) +importFrom(antaresRead,readLayout) +importFrom(antaresRead,setSimulationPath) +importFrom(antaresRead,setSimulationPathAPI) +importFrom(antaresRead,simOptions) +importFrom(assertthat,assert_that) +importFrom(data.table,":=") +importFrom(data.table,.SD) +importFrom(data.table,CJ) +importFrom(data.table,as.data.table) +importFrom(data.table,data.table) +importFrom(data.table,dcast) +importFrom(data.table,fread) +importFrom(data.table,fwrite) +importFrom(data.table,melt) +importFrom(data.table,month) +importFrom(data.table,rbindlist) +importFrom(data.table,setcolorder) +importFrom(data.table,setnames) +importFrom(data.table,transpose) +importFrom(data.table,yday) +importFrom(data.table,year) +importFrom(doFuture,registerDoFuture) +importFrom(future,plan) +importFrom(grDevices,col2rgb) +importFrom(httr,POST) +importFrom(httr,accept_json) +importFrom(httr,add_headers) +importFrom(httr,content) +importFrom(httr,content_type_json) +importFrom(httr,status_code) +importFrom(httr,stop_for_status) +importFrom(httr,upload_file) +importFrom(jsonlite,toJSON) +importFrom(jsonlite,write_json) +importFrom(lifecycle,deprecate_warn) +importFrom(lifecycle,deprecated) +importFrom(lifecycle,is_present) +importFrom(memuse,Sys.meminfo) +importFrom(plyr,ldply) +importFrom(plyr,llply) +importFrom(stats,as.formula) +importFrom(stats,sd) +importFrom(stats,setNames) +importFrom(utils,getFromNamespace) +importFrom(utils,head) +importFrom(utils,modifyList) +importFrom(utils,packageVersion) +importFrom(utils,read.table) +importFrom(utils,tail) +importFrom(utils,tar) +importFrom(utils,unzip) +importFrom(utils,write.table) +importFrom(whisker,whisker.render) +importFrom(yaml,read_yaml) diff --git a/NEWS.md b/NEWS.md index b0e23813..00728295 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,6 +1,45 @@ +# antaresEditObject 0.7.1.9000 + +### Breaking changes : + +* `createBindingConstraint()` / `editBindingConstraint()` uses metadata to check the group size of time series. +* `createBindingConstraintBulk()` checks consistency of groups passed as parameters and consistency with the study. +* `importZipStudyWeb()` can delete the zipfile and move the study in Antares Web to another folder +* delete `antaresRead-reexports.R` and adjust scripts to have a clean package +* `removeArea()` : send a warning instead of a stop if an area is referenced in a binding constraint coefficient +* `removeLink()` : send a warning instead of a stop if a link is referenced in a binding constraint coefficient +* `removeCluster()` : send a warning instead of a stop if a cluster is referenced in a binding constraint coefficient + +NEW FEATURES (Antares v8.8) : + +* `updateOptimizationSettings()` allows the user to update solver.log property +* `createClusterST()` / `editClusterST()` use new parameters and default values + + +BUGFIXES : + +* `createBindingConstraint()` in API mode (for study Second-member coupling constraint scenarios +> Scenarized RHS for binding constraints NEW FEATURES (Antares v8.7, cf. 
Antares v8.7 changelog) : diff --git a/R/antaresRead-reexports.R b/R/antaresRead-reexports.R deleted file mode 100644 index 74995dad..00000000 --- a/R/antaresRead-reexports.R +++ /dev/null @@ -1,20 +0,0 @@ - -#' Re-exports from antaresRead -#' -#' @importFrom antaresRead readIni -#' @name antaresRead-reexports -#' @export -#' @keywords internal -readIni <- antaresRead::readIni - -#' @importFrom antaresRead readIniFile -#' @rdname antaresRead-reexports -#' @export -#' @keywords internal -readIniFile <- antaresRead::readIniFile - -#' @importFrom antaresRead readIniAPI -#' @rdname antaresRead-reexports -#' @export -#' @keywords internal -readIniAPI <- antaresRead::readIniAPI diff --git a/R/computeTimeStampFromHourly.R b/R/computeTimeStampFromHourly.R index 61f04941..1646f9d7 100644 --- a/R/computeTimeStampFromHourly.R +++ b/R/computeTimeStampFromHourly.R @@ -380,8 +380,7 @@ cpt_timstamp <- function(Year, "Saturday", "Sunday") wd <- opts$firstWeekday - fj <- - antaresEditObject::readIniFile(file.path(opts$studyPath, "settings", "generaldata.ini")) + fj <- readIniFile(file.path(opts$studyPath, "settings", "generaldata.ini")) fj <- fj$general$january.1st fd <- which(wd == dw) fj <- which(fj == dw) diff --git a/R/createBindingConstraint.R b/R/createBindingConstraint.R index e50a652c..11bd536b 100644 --- a/R/createBindingConstraint.R +++ b/R/createBindingConstraint.R @@ -1,3 +1,5 @@ +utils::globalVariables(c('V2', 'dim_study', 'dim_input', 'name_group')) + #' @title Create a binding constraint #' #' @description @@ -5,7 +7,6 @@ #' `r lifecycle::badge("experimental")` #' #' Create a new binding constraint in an Antares study. -#' `createBindingConstraintBulk()` allow to create multiple constraints at once. #' #' #' @param name The name for the binding constraint. @@ -41,7 +42,7 @@ #' #' @name createBindingConstraint #' -#' @importFrom antaresRead getLinks setSimulationPath +#' @importFrom antaresRead getLinks setSimulationPath readIniFile #' @importFrom utils write.table #' #' @examples @@ -72,28 +73,6 @@ #' "area1%area3" = "2%3") #' ) #' -#' # Create multiple constraints -#' -#' # Prepare data for constraints -#' bindings_constraints <- lapply( -#' X = seq_len(100), -#' FUN = function(i) { -#' # use arguments of createBindingConstraint() -#' # all arguments must be provided ! -#' list( -#' name = paste0("constraints", i), -#' id = paste0("constraints", i), -#' values = matrix(data = rep(0, 8760 * 3), ncol = 3), -#' enabled = FALSE, -#' timeStep = "hourly", -#' operator = "both", -#' coefficients = list("area1%area2" = 1), -#' overwrite = TRUE -#' ) -#' } -#' ) -#' # create all constraints -#' createBindingConstraintBulk(bindings_constraints) #' #' # >= v8.7.0 : #' @@ -120,27 +99,6 @@ #' values = values_data, #' overwrite = TRUE) #' -#' # create multiple constraints -#' bindings_constraints <- lapply( -#' X = seq_len(10), -#' FUN = function(i) { -#' # use arguments of createBindingConstraint() -#' # all arguments must be provided ! 
-#' list( -#' name = paste0("constraints_bulk", i), -#' id = paste0("constraints_bulk", i), -#' values = values_data, -#' enabled = FALSE, -#' timeStep = "hourly", -#' operator = "both", -#' coefficients = list("at%fr" = 1), -#' group= "group_bulk", -#' overwrite = TRUE -#' ) -#' } -#' ) -#' -#' createBindingConstraintBulk(bindings_constraints) #' } createBindingConstraint <- function(name, id = tolower(name), @@ -176,9 +134,8 @@ createBindingConstraint <- function(name, identical(areas, sort(areas)) }) - if (!all(are_areas_sorted)) { + if (!all(are_areas_sorted)) stop("The areas are not sorted alphabetically.", call. = FALSE) - } } # API block @@ -211,11 +168,7 @@ createBindingConstraint <- function(name, if(is.null(group)) group <- "default" - values_operator <- switch(operator, - less = "lt", - equal = "eq", - greater = "gt", - both = c("lt", "gt")) + values_operator <- switch_to_list_name_operator_870(operator = operator) if(!is.null(values)){ assertthat::assert_that(inherits(values, "list")) @@ -231,8 +184,8 @@ createBindingConstraint <- function(name, call. = FALSE) # v870 : check group and values - # no check for add BC with NULL values - group_values_check(group_value = group, + # no check for add BC with NULL values + group_values_meta_check(group_value = group, values_data = values, operator_check = operator, output_operator = values_operator, @@ -292,7 +245,7 @@ createBindingConstraint <- function(name, "create_binding_constraint", name = body$name, enabled = body$enabled, - time_step = body$timeStep, + time_step = body$time_step, operator = body$operator, filter_year_by_year = body$filter_year_by_year, filter_synthesis = body$filter_synthesis, @@ -486,26 +439,29 @@ createBindingConstraint_ <- function(bindingConstraints, # Write values # v870 if(opts$antaresVersion>=870){ - # names_order_ts <- c("lt", "gt", "eq") + # make name file + path file + code file + # to write values matching operator name_file <- paste0(id, "_", output_operator, ".txt") - up_path <- file.path(opts$inputPath, "bindingconstraints", name_file) - lapply(up_path, function(x, df_ts= values, vect_path= up_path){ + df <- data.frame( + name_file = name_file, + code_file = output_operator, + path_file = up_path) + + # write txt file(s) + lapply(seq(nrow(df)), function(x, df_ts= values){ if(identical(df_ts, character(0))) - fwrite(x = data.table::as.data.table(df_ts), - file = x, - col.names = FALSE, - row.names = FALSE, - sep = "\t") + data_content <- data.table::as.data.table(df_ts) else{ - index <- grep(x = vect_path, pattern = x) - fwrite(x = data.table::as.data.table(df_ts[[index]]), - file = x, - col.names = FALSE, - row.names = FALSE, - sep = "\t") + target_name <- df[x, "code_file"] + data_content <- data.table::as.data.table(df_ts[[target_name]]) } + fwrite(x = data_content, + file = df[x, "path_file"], + col.names = FALSE, + row.names = FALSE, + sep = "\t") }) }else{ pathValues <- file.path(opts$inputPath, "bindingconstraints", paste0(id, ".txt")) @@ -519,9 +475,10 @@ createBindingConstraint_ <- function(bindingConstraints, } - #' @title Check dimension of time series for binding constraints -#' @description Only needed for study version >= 870 +#' @description Only needed for study version >= 870 +#' +#' Dimension of groups are compared with meta parameter `binding` returned by [antaresRead::simOptions()] #' @param group_value `character` name of group #' @param values_data `list` values used by the constraint #' @param operator_check `character` parameter "operator" @@ -530,7 +487,7 @@ 
createBindingConstraint_ <- function(bindingConstraints, #' @template opts #' @export #' @keywords internal -group_values_check <- function(group_value, +group_values_meta_check <- function(group_value, values_data, operator_check, output_operator, @@ -545,82 +502,48 @@ group_values_check <- function(group_value, return() } + # check dimension of new group (INPUT) + if(operator_check%in%"both"){ + lt_dim <- dim(values_data$lt)[2] + gt_dim <- dim(values_data$gt)[2] + if(lt_dim!=gt_dim) + stop("dimension of values are not similar ", + call. = FALSE) + p_col_new <- lt_dim + }else + p_col_new <- dim(values_data[[output_operator]])[2] + + # check meta + # study with no BC or virgin study + if(is.null(opts$binding)){ + cat("\nThere were no binding constraints in this study\n") + return() + } - # read existing binding constraint - # /!\/!\ function return "default values" (vector of 0) - existing_bc <- readBindingConstraints(opts = opts) + # read dimension + dim_bc_group <- opts$binding - # study with no BC or virgin study - if(is.null(existing_bc)) + # group already exists ? + # no duplicate groups in the study + is_exists <- grepl(pattern = group_value, + x = dim_bc_group[, .SD, .SDcols = 1]) + + if(!is_exists){ + cat("\nNew/existing group : ", + paste0("'", group_value, "'"), + " will be created/updated with dimension : ", + paste0("[", p_col_new, "]"), + "\n") return() + } - ## - # group creation - ## + # dimension of existing group + p_col <- dim_bc_group[name_group%in%group_value][["dim"]] - # check existing group Versus new group - existing_groups <- unlist( - lapply(existing_bc, - function(x){ - x[["properties"]][["group"]]}) - ) - search_group_index <- grep(pattern = group_value, - x = existing_groups) - - # new group ? - new_group <- identical(search_group_index, - integer(0)) - if(new_group) - message("New group ", "'", group_value, "'", " will be created") - - # check dimension values existing group Versus new group - if(!new_group){ - # check dimension of existing group - p_col <- sapply(existing_bc[search_group_index], - function(x){ - op <- x[["properties"]][["operator"]] - if(!op %in%"both") - dim(x[["values"]])[2] - else{ - lt_dim <- dim(x[["values"]][["less"]])[2] - gt_dim <- dim(x[["values"]][["greater"]])[2] - if(lt_dim!=gt_dim) - stop("dimension of values are not similar for constraint : ", - x$properties$id, call. = FALSE) - lt_dim - } - }) - - # keep dimension >1 - names(p_col) <- NULL - if(identical(p_col[p_col>1], - integer(0))){ - message("actual dimension of group : ", group_value, " is NULL or 1") - return(NULL) # continue process to write data - }else - p_col <- unique(p_col[p_col>1]) - message("actual dimension of group : ", group_value, " is ", p_col) - - # check dimension of new group - if(operator_check%in%"both"){ - lt_dim <- dim(values_data$lt)[2] - gt_dim <- dim(values_data$gt)[2] - if(lt_dim!=gt_dim) - stop("dimension of values are not similar ", - call. = FALSE) - p_col_new <- lt_dim - }else - p_col_new <- dim(values_data[[output_operator]])[2] - - # # no values provided - # if(is.null(p_col_new)) - # p_col_new <- 0 - - if(p_col!=p_col_new) # & p_col!=0 - stop(paste0("Put right columns dimension : ", - p_col, " for existing 'group' : ", - group_value), call. = FALSE) - } + if(p_col!=p_col_new) # & p_col!=0 + stop(paste0("Put right columns dimension : ", + p_col, " for existing 'group' : ", + group_value), call. = FALSE) } # v870 @@ -738,28 +661,101 @@ group_values_check <- function(group_value, } } - +#' @title Create multiple binding constraint at once. 
+#' @description #' `r lifecycle::badge("experimental")` +#' `r antaresEditObject:::badge_api_no()` #' @param constraints A `list` of several named `list` containing data to create binding constraints. #' **Warning** all arguments for creating a binding constraints must be provided, see examples. +#' @template opts +#' @family binding constraints functions +#' +#' @importFrom antaresRead getLinks setSimulationPath readIniFile +#' +#' @details +#' According to Antares version, usage may vary : +#' +#' **>= v8.7.0** : +#' - For each constraint name, one file .txt containing `_lt.txt, _gt.txt, _eq.txt`. +#' +#' - Parameter `values` must be named `list` ("lt", "gt", "eq") containing `data.frame` scenarized. +#' +#' - Add parameter `group` in input list `constraints` +#' +#' see example section below. #' @export #' -#' @rdname createBindingConstraint +#' @examples +#' \dontrun{ +#' # For Study version < v8.7.0 +#' # Create multiple constraints +#' +#' # Prepare data for constraints +#' bindings_constraints <- lapply( +#' X = seq_len(100), +#' FUN = function(i) { +#' # use arguments of createBindingConstraint() +#' # all arguments must be provided ! +#' list( +#' name = paste0("constraints", i), +#' id = paste0("constraints", i), +#' values = matrix(data = rep(0, 8760 * 3), ncol = 3), +#' enabled = FALSE, +#' timeStep = "hourly", +#' operator = "both", +#' coefficients = list("area1%area2" = 1), +#' overwrite = TRUE +#' ) +#' } +#' ) +#' # create all constraints +#' createBindingConstraintBulk(bindings_constraints) +#' +#' # For Study version >= v8.7.0 (add parameter `group`) +#' +#' # data values (hourly) +#' df <- matrix(data = rep(0, 8760 * 3), ncol = 3) +#' values_data <- list(lt=df, +#' gt= df) +#' +#' # create multiple constraints +#' bindings_constraints <- lapply( +#' X = seq_len(10), +#' FUN = function(i) { +#' # use arguments of createBindingConstraint() +#' # all arguments must be provided ! 
+#' list( +#' name = paste0("constraints_bulk", i), +#' id = paste0("constraints_bulk", i), +#' values = values_data, +#' enabled = FALSE, +#' timeStep = "hourly", +#' operator = "both", +#' coefficients = list("at%fr" = 1), +#' group= "group_bulk", +#' overwrite = TRUE +#' ) +#' } +#' ) +#' +#' createBindingConstraintBulk(bindings_constraints) +#' } +#' createBindingConstraintBulk <- function(constraints, opts = antaresRead::simOptions()) { + assertthat::assert_that(inherits(opts, "simOptions")) - ## Ini file - pathIni <- file.path(opts$inputPath, "bindingconstraints/bindingconstraints.ini") - bindingConstraints <- readIniFile(pathIni, stringsAsFactors = FALSE) + if(opts[["antaresVersion"]] >= 870) { + # check matrix dimension + .check_bulk_object_dim(constraints = constraints, opts = opts) + } + pathIni <- file.path(opts$inputPath, "bindingconstraints", "bindingconstraints.ini") + bindingConstraints <- readIniFile(pathIni, stringsAsFactors = FALSE) for (i in seq_along(constraints)) { - values_operator <- switch(constraints[[i]]$operator, - less = "lt", - equal = "eq", - greater = "gt", - both = c("lt", "gt")) + values_operator <- switch_to_list_name_operator_870(operator = constraints[[i]][["operator"]]) bindingConstraints <- do.call("createBindingConstraint_", c( constraints[[i]], @@ -777,8 +773,92 @@ createBindingConstraintBulk <- function(constraints, suppressWarnings({ res <- antaresRead::setSimulationPath(path = opts$studyPath, simulation = "input") }) + invisible(res) } +# control group dimensions in bulk object + # control object with study +.check_bulk_object_dim <- function(constraints, + opts = antaresRead::simOptions()){ + assertthat::assert_that(inherits(constraints, "list")) + + # check matrix number of columns by group + # In all_dim_group, group is column V1, number of columns is column V2 + matrix_dimension_by_constraint <- lapply(constraints, FUN = .compute_matrix_dimension_constraint) + all_dim_group <- do.call("rbind", c(matrix_dimension_by_constraint, fill = TRUE)) + # If each matrix is NULL, there is no second dimension in the table + if (dim(all_dim_group)[2] < 2) { + return() + } + + # Deduplicate rows and filter V2 > 1 + select_dim <- unique(all_dim_group)[V2 > 1] + + # Detect duplicated groups + duplicated_groups <- select_dim[duplicated(select_dim$V1),]$V1 + + if (!identical(duplicated_groups, character(0))) { + stop("Problem dimension with group : ", + paste0(duplicated_groups, sep = " "), + call. = FALSE) + } + + # check input object with study + if (is.null(opts[["binding"]])) { + return() + } + else{ + merge_groups <- merge.data.table(x = opts[["binding"]], + y = select_dim, + by.x ="name_group", + by.y = "V1") + + names(merge_groups) <- c("name_group", "dim_study", "dim_input") + + # check diff + diff_dim <- merge_groups[dim_study!=dim_input] + + if (nrow(diff_dim) > 0) { + stop("Problem dimension with group in Study: ", + paste0(diff_dim$name_group, sep = " "), + call. 
= FALSE) + } + } +} + + +switch_to_list_name_operator_870 <- function(operator) { + + assertthat::assert_that(operator %in% c("less", "greater", "equal", "both")) + + operator_symbol <- switch(operator, + "less" = "lt", + "equal" = "eq", + "greater" = "gt", + "both" = c("lt", "gt") + ) + + return(operator_symbol) +} + +# Compute the dimension of a matrix (if operatior is not "both") or 2 (if operatior is "both") in a constraint +.compute_matrix_dimension_constraint <- function(constraint){ + + assertthat::assert_that(inherits(constraint, "list")) + assertthat::assert_that(all(c("group", "operator", "values") %in% names(constraint))) + + res <- data.table() + + operator_symbol <- switch_to_list_name_operator_870(operator = constraint[["operator"]]) + dim_matrix <- lapply(constraint[["values"]][which(names(constraint[["values"]]) %in% operator_symbol)], dim) + dim_matrix <- dim_matrix[!sapply(dim_matrix, is.null)] + nb_matrix <- length(dim_matrix) + if (nb_matrix > 0) { + res <- data.table(rep(constraint[["group"]], nb_matrix), sapply(dim_matrix, "[[", 2)) + } + + return(res) +} diff --git a/R/createClusterST.R b/R/createClusterST.R index 114ac949..f5aa8950 100644 --- a/R/createClusterST.R +++ b/R/createClusterST.R @@ -7,31 +7,45 @@ #' #' @param area The area where to create the cluster. #' @param cluster_name Name for the cluster, it will prefixed by area name, unless you set `add_prefix = FALSE`. -#' @param group Group of the cluster, one of : "PSP_open", "PSP_closed", "Pondage", "Battery", "Other". It corresponds to the type of stockage. +#' @param group Group of the cluster, one of : "PSP_open", "PSP_closed", "Pondage", "Battery", "Other". +#' It corresponds to the type of stockage. #' @param storage_parameters `list ` Parameters to write in the Ini file (see `Note`). -#' @param PMAX_injection modulation of charging capacity on an 8760-hour basis. The values are float between 0 and 1. -#' @param PMAX_withdrawal modulation of discharging capacity on an 8760-hour basis. The values are float between 0 and 1. -#' @param inflows imposed withdrawals from the stock for other uses, The values are integer. -#' @param lower_rule_curve This is the lower limit for filling the stock imposed each hour. The values are float between 0 and 1. -#' @param upper_rule_curve This is the upper limit for filling the stock imposed each hour. The values are float between 0 and 1. +#' @param PMAX_injection Modulation of charging capacity on an 8760-hour basis. `numeric` \{0;1\} (8760*1). +#' @param PMAX_withdrawal Modulation of discharging capacity on an 8760-hour basis. `numeric` \{0;1\} (8760*1). +#' @param inflows Algebraic deviation of the state of charge of the storage, which does not induce any power +#' generation or consumption on the system `numeric` \{<0;>0\} (8760*1). +#' @param lower_rule_curve This is the lower limit for filling the stock imposed each hour. `numeric` \{0;1\} (8760*1). +#' @param upper_rule_curve This is the upper limit for filling the stock imposed each hour. `numeric` \{0;1\} (8760*1). #' @param add_prefix If `TRUE` (the default), `cluster_name` will be prefixed by area name. #' @param overwrite Logical, overwrite the cluster or not. #' #' @template opts #' @note #' To write parameters to the `list.ini` file. You have function `storage_values_default()` who is called by default. -#' This function return `list` containing six parameters for cluster `st-storage`. -#' See example section. 
+#' This function return `list` containing properties according study version for cluster `st-storage`. +#' +#' Study version >= "8.6.0" : +#' - efficiency = 1 (`numeric` \{0;1\}) +#' - reservoircapacity = 0 (`integer` >= 0) +#' - initiallevel = 0 (`numeric` \{0;1\}) +#' - withdrawalnominalcapacity = 0 (`integer` >= 0) +#' - injectionnominalcapacity = 0 (`integer` >= 0) +#' - initialleveloptim = FALSE (`logical` TRUE/FALSE) +#' +#' +#' Study version >= "8.8.0" (update + new parameter) : +#' - initiallevel = 0.5 (`numeric` \{0;1\}) +#' - enabled = TRUE (`logical` TRUE/FALSE) +#' +#' ⚠⚠⚠ +#' +#' By default, these values don't allow you to have an active cluster (See example section.) +#' +#' ⚠⚠⚠ #' -#' To write data (.txt file), you have parameter for each output file : -#' - PMAX-injection.txt -#' - PMAX-withdrawal.txt -#' - inflows.txt -#' - lower-rule-curve.txt -#' - upper-rule-curve.txt #' -#' @seealso [editClusterST()] to edit existing clusters, [readClusterSTDesc()] to read cluster, -#' [removeClusterST()] to remove clusters. +#' @seealso All the functions needed to manage a storage cluster, +#' [antaresRead::readClusterSTDesc()], [editClusterST()], [removeClusterST()]. #' #' @export #' @@ -57,13 +71,19 @@ #' # > "my_area_my_cluster" #' #' # create cluster with custom parameter and data +#' # use the function to create your own list of parameters (no Antares optim) +#' # if you want optim (my_parameters$initialleveloptim <- TRUE) #' my_parameters <- storage_values_default() #' my_parameters$efficiency <- 0.5 +#' my_parameters$initiallevel <- 10 +#' my_parameters$withdrawalnominalcapacity <- 100 +#' my_parameters$injectionnominalcapacity <- 1000 #' my_parameters$reservoircapacity <- 10000 #' -#' +#' # time series #' inflow_data <- matrix(3, 8760) #' ratio_data <- matrix(0.7, 8760) +#' #' createClusterST(area = "my_area", #' "my_cluster", #' storage_parameters = my_parameters, @@ -108,42 +128,24 @@ createClusterST <- function(area, " you should be using one of: ", paste(st_storage_group, collapse = ", ") ) - # check area existing in current study - area <- tolower(area) + # check area exsiting in current study check_area_name(area, opts) + area <- tolower(area) - # To avoid failure in an unit test (API is mocked) we add this block - api_study <- is_api_study(opts) - if (api_study && is_api_mocked(opts)) { - cluster_exists <- FALSE - } else { - cluster_exists <- check_cluster_name(area, cluster_name, add_prefix, opts) - } - - if (!api_study) { - if (cluster_exists & !overwrite) { - stop("Cluster already exists. Overwrite it with overwrite option or edit it with editClusterST().") - } - } - if (api_study) { - if (cluster_exists) { - stop("Cluster already exists. 
Edit it with editClusterST().") - } - } ## # check parameters (ini file) ## assertthat::assert_that(inherits(storage_parameters, "list")) - # static name of list parameters - names_parameters <- names(storage_values_default()) + # static name of list parameters + names_parameters <- names(storage_values_default(opts = opts)) if(!all(names(storage_parameters) %in% names_parameters)) stop(append("Parameter 'st-storage' must be named with the following elements: ", paste0(names_parameters, collapse= ", "))) - - # check values parameters - .st_mandatory_params(list_values = storage_parameters) + + # check values parameters + .st_mandatory_params(list_values = storage_parameters, opts = opts) # DATA parameters : default value + name txt file @@ -162,14 +164,34 @@ createClusterST <- function(area, # check syntax ini parameters params_cluster <- hyphenize_names(storage_parameters) - cluster_name <- generate_cluster_name(area, cluster_name, add_prefix) - params_cluster <- c(list(name = cluster_name, group = group),params_cluster) + if (add_prefix) + cluster_name <- paste(area, cluster_name, sep = "_") + params_cluster <- c(list(name = cluster_name, group = group), + params_cluster) ################# - # API block - if (api_study) { + if (is_api_study(opts)) { # format name for API cluster_name <- transform_name_to_id(cluster_name) + + # /!\ temporary solution /!\ + # as the endpoint does not return an error if the cluster already exist + if(!is_api_mocked(opts)){ + exists <- FALSE + suppressWarnings( + clusters <- readClusterSTDesc(opts = opts) + ) + if (nrow(clusters) > 0) { + area_filter <- area + clusters_filtered <- clusters[clusters$area == tolower(area_filter) & + clusters$cluster == cluster_name,] + exists <- nrow(clusters_filtered) > 0 + } + if(exists) + stop("Cluster already exists. Edit it with editClusterST().") + } + params_cluster$name <- cluster_name cmd <- api_command_generate( @@ -177,14 +199,14 @@ createClusterST <- function(area, area_id = area, parameters = params_cluster ) - + api_command_register(cmd, opts = opts) `if`( should_command_be_executed(opts), api_command_execute(cmd, opts = opts, text_alert = "{.emph create_st_storage}: {msg_api}"), cli_command_registered("create_st_storage") ) - + for (i in names(storage_value)){ if (!is.null(get(i))) { # format name for API @@ -209,11 +231,11 @@ createClusterST <- function(area, ) } } - + return(invisible(opts)) } ########################## - - + ## # parameters traitements @@ -222,25 +244,31 @@ createClusterST <- function(area, inputPath <- opts$inputPath assertthat::assert_that(!is.null(inputPath) && file.exists(inputPath)) - # named list for writing ini file - # params_cluster <- stats::setNames(object = list(params_cluster), nm = cluster_name) - # path to ini file containing clusters' name and parameters path_clusters_ini <- file.path(inputPath, "st-storage", "clusters", tolower(area), "list.ini") # read previous content of ini previous_params <- readIniFile(file = path_clusters_ini) - if (tolower(cluster_name) %in% tolower(names(previous_params)) & overwrite){ - ind_cluster <- which(tolower(names(previous_params)) %in% tolower(cluster_name))[1] - previous_params[[ind_cluster]] <- params_cluster - names(previous_params)[[ind_cluster]] <- cluster_name - } else { - previous_params[[cluster_name]] <- params_cluster + # already exists ? 
+ if (tolower(cluster_name) %in% tolower(names(previous_params)) + & !overwrite) + stop(paste(cluster_name, "already exist")) + + # overwrite + if(overwrite){ + if(tolower(cluster_name) %in% tolower(names(previous_params))){ + ind_cluster <- which(tolower(names(previous_params)) %in% + tolower(cluster_name))[1] + previous_params[[ind_cluster]] <- params_cluster + names(previous_params)[[ind_cluster]] <- cluster_name + } } + + # add properties + previous_params[[cluster_name]] <- params_cluster - # params_cluster <- c(previous_params, params_cluster) - + # write properties (all properties are overwritten) writeIni( listData = previous_params, pathIni = path_clusters_ini, @@ -278,40 +306,35 @@ createClusterST <- function(area, }) invisible(res) - + } # check parameters (`list`) -#' @return `list` -.st_mandatory_params <- function(list_values){ - .is_ratio(list_values$efficiency, +.st_mandatory_params <- function(list_values, opts){ + .is_ratio(list_values[["efficiency"]], "efficiency") - .check_capacity(list_values$reservoircapacity, + .check_capacity(list_values[["reservoircapacity"]], "reservoircapacity") - # if(!list_values$reservoircapacity >= 0) - # stop("reservoircapacity must be >= 0", - # call. = FALSE) - .is_ratio(list_values$initiallevel, + .is_ratio(list_values[["initiallevel"]], "initiallevel") - .check_capacity(list_values$withdrawalnominalcapacity, + .check_capacity(list_values[["withdrawalnominalcapacity"]], "withdrawalnominalcapacity") - # if(!list_values$withdrawalnominalcapacity >= 0) - # stop("withdrawalnominalcapacity must be >= 0", - # call. = FALSE) - .check_capacity(list_values$injectionnominalcapacity, + .check_capacity(list_values[["injectionnominalcapacity"]], "injectionnominalcapacity") - # if(!list_values$injectionnominalcapacity >= 0) - # stop("injectionnominalcapacity must be >= 0", - # call. = FALSE) - if(!is.null(list_values$initialleveloptim)) - assertthat::assert_that(inherits(list_values$initialleveloptim, - "logical")) + if(!is.null(list_values[["initialleveloptim"]])) + assertthat::assert_that(inherits(list_values[["initialleveloptim"]], + "logical")) + + if (opts$antaresVersion >= 880) + if(!is.null(list_values[["enabled"]])) + assertthat::assert_that(inherits(list_values[["enabled"]], + "logical")) } .is_ratio <- function(x, mess){ @@ -334,18 +357,29 @@ createClusterST <- function(area, #' Short Term Storage Property List #' +#' @description +#' Default values are returned according to study version #' +#' @template opts #' @return a named list #' @export #' #' @examples +#' \dontrun{ #' storage_values_default() -storage_values_default <- function() { - list(efficiency = 1, - reservoircapacity = 0, - initiallevel = 0, - withdrawalnominalcapacity = 0, - injectionnominalcapacity = 0, - initialleveloptim = FALSE) +#' } +storage_values_default <- function(opts = simOptions()) { + lst_parameters <- list(efficiency = 1, + reservoircapacity = 0, + initiallevel = 0, + withdrawalnominalcapacity = 0, + injectionnominalcapacity = 0, + initialleveloptim = FALSE) + + if (opts$antaresVersion >= 880){ + lst_parameters$initiallevel <- 0.5 + lst_parameters$enabled <- TRUE + } + + return(lst_parameters) } - diff --git a/R/createStudy.R b/R/createStudy.R index 74a86a0f..376e0a80 100644 --- a/R/createStudy.R +++ b/R/createStudy.R @@ -8,7 +8,7 @@ #' @param study_name Name of the study. #' @param antares_version Antares number version. #' -#' @return Result of [antaresRead::setSimulationPath()] or [setSimulationPathAPI()] accordingly. 
+#' @return Result of [antaresRead::setSimulationPath()] or [antaresRead::setSimulationPathAPI()] accordingly. #' @export #' #' @name create-study diff --git a/R/editBindingConstraint.R b/R/editBindingConstraint.R index 84ce11d0..21eee4db 100644 --- a/R/editBindingConstraint.R +++ b/R/editBindingConstraint.R @@ -172,11 +172,11 @@ editBindingConstraint <- function(name, # check group values if(!is.null(values)) - group_values_check(group_value = group, - values_data = values, - operator_check = operator, - output_operator = values_operator, - opts = opts) + group_values_meta_check(group_value = group, + values_data = values, + operator_check = operator, + output_operator = values_operator, + opts = opts) } @@ -241,13 +241,17 @@ editBindingConstraint <- function(name, "bindingconstraints", name_file) - lapply(up_path, + df <- data.frame( + name_file = name_file, + code_file = values_operator, + path_file = up_path) + + lapply(seq(nrow(df)), function(x, - df_ts= values, - vect_path= up_path){ - index <- grep(x = vect_path, pattern = x) - fwrite(x = data.table::as.data.table(df_ts[[index]]), - file = x, + df_ts= values){ + target_name <- df[x, "code_file"] + fwrite(x = data.table::as.data.table(df_ts[[target_name]]), + file = df[x, "path_file"], col.names = FALSE, row.names = FALSE, sep = "\t") diff --git a/R/editClusterST.R b/R/editClusterST.R index 4bb683ce..17fedc6d 100644 --- a/R/editClusterST.R +++ b/R/editClusterST.R @@ -38,15 +38,6 @@ editClusterST <- function(area, check_active_ST(opts, check_dir = TRUE) check_area_name(area, opts) - api_study <- is_api_study(opts) - # To avoid failure in an unit test (API is mocked) we add this block - if (api_study && is_api_mocked(opts)) { - cluster_exists <- TRUE - } else { - cluster_exists <- check_cluster_name(area, cluster_name, add_prefix, opts) - } - cl_name_msg <- generate_cluster_name(area, cluster_name, add_prefix) - assertthat::assert_that(cluster_exists, msg = paste0("Cluster '", cl_name_msg, "' does not exist. It can not be edited.")) # statics groups st_storage_group <- c("PSP_open", "PSP_closed", @@ -72,14 +63,14 @@ editClusterST <- function(area, assertthat::assert_that(inherits(storage_parameters, "list")) # static name of list parameters - names_parameters <- names(storage_values_default()) + names_parameters <- names(storage_values_default(opts = opts)) if(!all(names(storage_parameters) %in% names_parameters)) stop(append("Parameter 'st-storage' must be named with the following elements: ", paste0(names_parameters, collapse= ", "))) # check values parameters - .st_mandatory_params(list_values = storage_parameters) + .st_mandatory_params(list_values = storage_parameters, opts = opts) # check list of parameters params_cluster <- hyphenize_names(storage_parameters) @@ -96,10 +87,28 @@ editClusterST <- function(area, params_cluster$group <- NULL ##### API block ---- - if (api_study) { + if (is_api_study(opts)) { # format name for API cluster_name <- transform_name_to_id(cluster_name) + # /!\ temporary solution /!\ + # as the endpoint does not return an error if the cluster does not exist + if(!is_api_mocked(opts)){ + exists <- FALSE + suppressWarnings( + clusters <- readClusterSTDesc(opts = opts) + ) + if (nrow(clusters) > 0) { + clusters_filtered <- clusters[clusters$area == tolower(area) & + clusters$cluster == cluster_name,] + exists <- nrow(clusters_filtered) > 0 + } + assertthat::assert_that(exists, + msg = paste0("Cluster '", + cluster_name, + "' does not exist. 
It can not be edited.")) + } + # update parameters if something else than name if (length(params_cluster) > 1) { currPath <- "input/st-storage/clusters/%s/list/%s" @@ -161,6 +170,15 @@ editClusterST <- function(area, # read previous content of ini previous_params <- readIniFile(file = path_clusters_ini) + if (!tolower(cluster_name) %in% tolower(names(previous_params))) + stop( + "'", + cluster_name, + "' doesn't exist, it can't be edited. You can create cluster with createCluster().", + call. = FALSE + ) + + # select existing cluster ind_cluster <- which(tolower(names(previous_params)) %in% tolower(cluster_name))[1] @@ -176,14 +194,10 @@ editClusterST <- function(area, ) } - - - ## # check DATA (series/) ## - # datas associated with cluster path_txt_file <- file.path(opts$inputPath, "st-storage", diff --git a/R/editLink.R b/R/editLink.R index be1dd37b..8cbfc9e2 100644 --- a/R/editLink.R +++ b/R/editLink.R @@ -20,6 +20,7 @@ #' @importFrom assertthat assert_that #' @importFrom stats setNames #' @importFrom utils read.table write.table modifyList +#' @importFrom data.table fwrite as.data.table #' #' @examples #' \dontrun{ @@ -44,14 +45,21 @@ editLink <- function(from, opts = antaresRead::simOptions()) { assertthat::assert_that(inherits(opts, "simOptions")) - + + if (!is.null(filter_synthesis)) { + filter_synthesis <- paste(filter_synthesis, collapse = ", ") + } + if (!is.null(filter_year_by_year)) { + filter_year_by_year <- paste(filter_year_by_year, collapse = ", ") + } + propertiesLink <- dropNulls(list( `hurdles-cost` = hurdles_cost, `transmission-capacities` = transmission_capacities, `asset-type` = asset_type, `display-comments` = display_comments, - `filter-synthesis` = paste(filter_synthesis,collapse = ", "), - `filter-year-by-year` = paste(filter_year_by_year,collapse = ", ") + `filter-synthesis` = filter_synthesis, + `filter-year-by-year` = filter_year_by_year )) # control areas name @@ -118,7 +126,7 @@ editLink <- function(from, direct <- last_cols indirect <- first_cols } - tsLink <- data.table::as.data.table(tsLink) + tsLink <- as.data.table(tsLink) } else { warning("tsLink will be ignored since Antares version < 820.", call. = FALSE) } @@ -224,8 +232,8 @@ editLink <- function(from, if (!is.null(dataLink)) { if (v820) { - data.table::fwrite( - x = data.table::as.data.table(dataLink), + fwrite( + x = as.data.table(dataLink), row.names = FALSE, col.names = FALSE, sep = "\t", @@ -237,8 +245,8 @@ editLink <- function(from, dataLink[, 1:2] <- dataLink[, 2:1] dataLink[, 4:5] <- dataLink[, 5:4] } - data.table::fwrite( - x = data.table::as.data.table(dataLink), + fwrite( + x = as.data.table(dataLink), row.names = FALSE, col.names = FALSE, sep = "\t", @@ -252,7 +260,7 @@ editLink <- function(from, if (!is.null(tsLink)) { if (v820) { dir.create(file.path(inputPath, "links", from, "capacities"), showWarnings = FALSE) - data.table::fwrite( + fwrite( x = tsLink[, .SD, .SDcols = direct], row.names = FALSE, col.names = FALSE, @@ -260,7 +268,7 @@ editLink <- function(from, scipen = 12, file = file.path(inputPath, "links", from, "capacities", paste0(to, "_direct.txt")) ) - data.table::fwrite( + fwrite( x = tsLink[, .SD, .SDcols = indirect], row.names = FALSE, col.names = FALSE, diff --git a/R/importStudyAPI.R b/R/importStudyAPI.R index 840aadf9..0b8ca74a 100644 --- a/R/importStudyAPI.R +++ b/R/importStudyAPI.R @@ -54,6 +54,8 @@ copyStudyWeb <- function(opts = antaresRead::simOptions(), host, token, #' @param host Host of AntaREST server API. #' @param token API personnal access token. 
#' @param zipfile_name Name of the zipfile of the study. +#' @param delete_zipfile Should the zipfile be deleted after upload. +#' @param folder_destination Folder of the study in Antares Web. #' #' @template opts #' @@ -62,26 +64,36 @@ copyStudyWeb <- function(opts = antaresRead::simOptions(), host, token, #' #' @export #' -importZipStudyWeb <- function(host, token, zipfile_name, opts = antaresRead::simOptions()) { +importZipStudyWeb <- function(host, token, zipfile_name, delete_zipfile = TRUE, folder_destination = NULL, opts = antaresRead::simOptions()) { - # Build the destination folder - dir_study <- unlist(strsplit(opts$studyPath, split = .Platform$file.sep)) - dir_study <- dir_study[seq(length(dir_study) - 1)] - dir_study <- do.call("file.path", as.list(dir_study)) + # Dstination folder + dir_study <- dirname(opts$studyPath) # Zip the study zipfile <- backupStudy(zipfile_name, what = "study", opts = opts, extension = ".zip") + zipfile_path <- file.path(dir_study, zipfile) # Import the study studyId <- api_post( opts = list(host = host, token = token), endpoint = "_import", default_endpoint = "v1/studies", - body = list(study = upload_file(file.path(dir_study, zipfile))), + body = list(study = upload_file(zipfile_path)), encode = "multipart" ) opts <- setSimulationPathAPI(host = host, token = token, study_id = studyId, simulation = "input") + # Move the study + if (!is.null(folder_destination)) { + api_put(opts = opts, + endpoint = file.path(paste0(opts$study_id, "/move?folder_dest=", folder_destination)), + default_endpoint = "v1/studies" + ) + } + + if (delete_zipfile) { + file.remove(zipfile_path) + } return(invisible(opts)) } \ No newline at end of file diff --git a/R/removeArea.R b/R/removeArea.R index 08fe0fbb..8eb57b74 100644 --- a/R/removeArea.R +++ b/R/removeArea.R @@ -215,14 +215,13 @@ checkRemovedArea <- function(area, all_files = TRUE, opts = antaresRead::simOpti # Cluster bc_not_remove_cluster <- character(0) clusters <- readClusterDesc(opts = opts) - clusters_area <- clusters[clusters$area == name, c("area", "cluster")] + clusters_area <- clusters[clusters$area == name,] if (nrow(clusters_area) > 0) { bc_not_remove_cluster <- detect_pattern_in_binding_constraint(pattern = paste0(clusters_area$area, ".", clusters_area$cluster), opts = opts) } bc_not_remove <- union(bc_not_remove_cluster, bc_not_remove_link) if (!identical(bc_not_remove, character(0))) { - message("The following binding constraints have the area to remove in a coefficient : ", paste0(bc_not_remove, collapse = ", ")) - stop("Can not remove the area ", name) + warning("The following binding constraints have the area to remove in a coefficient : ", paste0(bc_not_remove, collapse = ", ")) } } diff --git a/R/removeCluster.R b/R/removeCluster.R index 69e61a42..7989543e 100644 --- a/R/removeCluster.R +++ b/R/removeCluster.R @@ -127,7 +127,7 @@ removeClusterST <- function(area, if (api_study && api_mocked) { cluster_exists <- TRUE } else { - cluster_exists <- check_cluster_name(area, cluster_name, add_prefix, opts) + cluster_exists <- check_cluster_name(area_name = area, cluster_name = cluster_name, add_prefix = add_prefix, opts = opts) } assertthat::assert_that(cluster_exists, msg = "Cluster can not be removed. 
It does not exist.") } @@ -139,8 +139,7 @@ removeClusterST <- function(area, if (!api_study | (api_study && !api_mocked)) { bc_not_remove <- detect_pattern_in_binding_constraint(pattern = paste0(area, ".", cluster_name), opts = opts) if (!identical(bc_not_remove, character(0))) { - message("The following binding constraints have the cluster to remove as a coefficient : ", paste0(bc_not_remove, collapse = ", ")) - stop("Can not remove the cluster ", cluster_name, " in the area ", area, ".") + warning("The following binding constraints have the cluster to remove as a coefficient : ", paste0(bc_not_remove, collapse = ", ")) } } } diff --git a/R/removeLink.R b/R/removeLink.R index 7c33892d..500f9b72 100644 --- a/R/removeLink.R +++ b/R/removeLink.R @@ -45,8 +45,7 @@ removeLink <- function(from, to, opts = antaresRead::simOptions()) { # check if the link can be removed safely, i.e. the link is not referenced in a binding constraint bc_not_remove <- detect_pattern_in_binding_constraint(pattern = c(paste0(from, "%", to), paste0(to, "%", from)), opts = opts) if (!identical(bc_not_remove, character(0))) { - message("The following binding constraints have the link to remove as a coefficient : ", paste0(bc_not_remove, collapse = ", ")) - stop("Can not remove the link ", link) + warning("The following binding constraints have the link to remove as a coefficient : ", paste0(bc_not_remove, collapse = ", ")) } # API block diff --git a/R/updateGeneralSettings.R b/R/updateGeneralSettings.R index 9d59e2e0..4d70536f 100644 --- a/R/updateGeneralSettings.R +++ b/R/updateGeneralSettings.R @@ -19,34 +19,39 @@ #' printed out in an individual directory7 : #' Study_name/OUTPUT/simu_tag/Economy /mc-i-number #' @param derated See Antares General Reference Guide. -#' @param custom.ts.numbers See Antares General Reference Guide. -#' @param user.playlist See Antares General Reference Guide. -#' @param filtering See Antares General Reference Guide. -#' @param active.rules.scenario See Antares General Reference Guide. -#' @param generate See Antares General Reference Guide. -#' @param nbtimeseriesload See Antares General Reference Guide. -#' @param nbtimeserieshydro See Antares General Reference Guide. -#' @param nbtimeserieswind See Antares General Reference Guide. -#' @param nbtimeseriesthermal See Antares General Reference Guide. -#' @param nbtimeseriessolar See Antares General Reference Guide. -#' @param refreshtimeseries See Antares General Reference Guide. -#' @param intra.modal See Antares General Reference Guide. -#' @param inter.modal See Antares General Reference Guide. -#' @param refreshintervalload See Antares General Reference Guide. -#' @param refreshintervalhydro See Antares General Reference Guide. -#' @param refreshintervalwind See Antares General Reference Guide. -#' @param refreshintervalthermal See Antares General Reference Guide. -#' @param refreshintervalsolar See Antares General Reference Guide. -#' @param readonly See Antares General Reference Guide. +#' @param custom.scenario See Antares General Reference Guide (see link below). Replace custom.ts.numbers. +#' @param custom.ts.numbers See Antares General Reference Guide (see link below). Replaced by custom.scenario. +#' @param user.playlist See Antares General Reference Guide (see link below). +#' @param filtering See Antares General Reference Guide (see link below). +#' @param active.rules.scenario See Antares General Reference Guide (see link below). +#' @param generate See Antares General Reference Guide (see link below). 
+#' @param nbtimeseriesload See Antares General Reference Guide (see link below). +#' @param nbtimeserieshydro See Antares General Reference Guide (see link below). +#' @param nbtimeserieswind See Antares General Reference Guide (see link below). +#' @param nbtimeseriesthermal See Antares General Reference Guide (see link below). +#' @param nbtimeseriessolar See Antares General Reference Guide (see link below). +#' @param refreshtimeseries See Antares General Reference Guide (see link below). +#' @param intra.modal See Antares General Reference Guide (see link below). +#' @param inter.modal See Antares General Reference Guide (see link below). +#' @param refreshintervalload See Antares General Reference Guide (see link below). +#' @param refreshintervalhydro See Antares General Reference Guide (see link below). +#' @param refreshintervalwind See Antares General Reference Guide (see link below). +#' @param refreshintervalthermal See Antares General Reference Guide (see link below). +#' @param refreshintervalsolar See Antares General Reference Guide (see link below). +#' @param readonly See Antares General Reference Guide (see link below). #' @param geographic.trimming \code{logical} indicates whether to store the results for all time spans (FALSE) or for custom time spans (TRUE) +#' @param thematic.trimming See Antares General Reference Guide (see link below). #' @template opts #' #' @export #' #' @importFrom utils modifyList #' @importFrom assertthat assert_that -#' @importFrom antaresRead setSimulationPath +#' @importFrom antaresRead setSimulationPath readIniFile +#' @importFrom lifecycle is_present deprecate_warn deprecated #' +#' @seealso \href{https://antares-simulator.readthedocs.io/en/latest/user-guide/solver/04-parameters/}{Antares General Reference Guide} +#' #' @examples #' \dontrun{ #' @@ -69,7 +74,8 @@ updateGeneralSettings <- function(mode = NULL, leapyear = NULL, year.by.year = NULL, derated = NULL, - custom.ts.numbers = NULL, + custom.scenario = NULL, + custom.ts.numbers = deprecated(), user.playlist = NULL, filtering = NULL, active.rules.scenario = NULL, @@ -89,10 +95,20 @@ updateGeneralSettings <- function(mode = NULL, refreshintervalsolar = NULL, readonly = NULL, geographic.trimming = NULL, + thematic.trimming = NULL, opts = antaresRead::simOptions()) { assertthat::assert_that(inherits(opts, "simOptions")) + # Replace custom.ts.numbers argument by custom.scenario + if (lifecycle::is_present(custom.ts.numbers)) { + lifecycle::deprecate_warn(when = "0.7.1", + what = "updateGeneralSettings(custom.ts.numbers = )", + with = "updateGeneralSettings(custom.scenario = )" + ) + custom.scenario <- custom.ts.numbers + } + intra.modal <- check_param_modal(intra.modal, opts) inter.modal <- check_param_modal(inter.modal, opts) @@ -115,7 +131,7 @@ updateGeneralSettings <- function(mode = NULL, leapyear = leapyear, year.by.year = year.by.year, derated = derated, - custom.ts.numbers = custom.ts.numbers, + custom.scenario = custom.scenario, user.playlist = user.playlist, filtering = filtering, active.rules.scenario = active.rules.scenario, @@ -134,43 +150,39 @@ updateGeneralSettings <- function(mode = NULL, refreshintervalthermal = refreshintervalthermal, refreshintervalsolar = refreshintervalsolar, readonly = readonly, - geographic.trimming = geographic.trimming + geographic.trimming = geographic.trimming, + thematic.trimming = thematic.trimming ) - new_params <- dropNulls(new_params) - for (i in seq_along(new_params)) { - new_params[[i]] <- paste(as.character(new_params[[i]]), collapse = 
", ") - names(new_params)[i] <- dicoGeneralSettings(names(new_params)[i]) - } + + new_params <- dropNulls(x = new_params) + new_params <- lapply(X = new_params, FUN = .format_ini_rhs) + names(new_params) <- sapply(names(new_params), dicoGeneralSettings, USE.NAMES = FALSE) # API block if (is_api_study(opts)) { - + writeIni(listData = new_params, pathIni = "settings/generaldata/general", opts = opts) return(update_api_opts(opts)) } - # read current settings - generaldatapath <- file.path(opts$studyPath, "settings", "generaldata.ini") + generaldatapath <- file.path(opts[["studyPath"]], "settings", "generaldata.ini") generaldata <- readIniFile(file = generaldatapath) - # update general field - l_general <- generaldata$general - - l_general <- utils::modifyList(x = l_general, val = new_params) - generaldata$general <- l_general + l_general <- generaldata[["general"]] + l_general <- modifyList(x = l_general, val = new_params) + generaldata[["general"]] <- l_general - # write writeIni(listData = generaldata, pathIni = generaldatapath, overwrite = TRUE, opts = opts) - # Maj simulation suppressWarnings({ - res <- antaresRead::setSimulationPath(path = opts$studyPath, simulation = "input") + res <- setSimulationPath(path = opts[["studyPath"]], simulation = "input") }) invisible(res) } + check_param_modal <- function(x, opts) { if (is.null(x)) return(NULL) @@ -219,6 +231,7 @@ check_param_RES <- function(x, opts) { return(x) } + check_param_links <- function(x, opts) { if (is.null(x)) return(NULL) @@ -232,6 +245,7 @@ check_param_links <- function(x, opts) { return(x) } + #' Correspondence between arguments of \code{updateGeneralSettings} and actual Antares parameters. #' #' @param arg An argument from function \code{updateGeneralSettings}. @@ -243,29 +257,28 @@ check_param_links <- function(x, opts) { #' @examples #' dicoGeneralSettings("year.by.year") # "year-by-year" dicoGeneralSettings <- function(arg) { - if (length(arg) > 1) + + if (length(arg) > 1) { stop("'arg' must be length one") + } antares_params <- as.list( c("mode", "horizon", "nbyears", "simulation.start", "simulation.end", "january.1st", "first-month-in-year", "first.weekday", "leapyear", - "year-by-year", "derated", "custom-ts-numbers", "user-playlist", + "year-by-year", "derated", "custom-scenario", "user-playlist", "filtering", "active-rules-scenario", "generate", "nbtimeseriesload", "nbtimeserieshydro", "nbtimeserieswind", "nbtimeseriesthermal", "nbtimeseriessolar", "refreshtimeseries", "intra-modal", "inter-modal", "refreshintervalload", "refreshintervalhydro", "refreshintervalwind", - "refreshintervalthermal", "refreshintervalsolar", "readonly", "geographic-trimming") + "refreshintervalthermal", "refreshintervalsolar", "readonly", "geographic-trimming", "thematic-trimming") ) names(antares_params) <- c("mode", "horizon", "nbyears", "simulation.start", "simulation.end", "january.1st", "first.month.in.year", "first.weekday", "leapyear", - "year.by.year", "derated", "custom.ts.numbers", "user.playlist", + "year.by.year", "derated", "custom.scenario", "user.playlist", "filtering", "active.rules.scenario", "generate", "nbtimeseriesload", "nbtimeserieshydro", "nbtimeserieswind", "nbtimeseriesthermal", "nbtimeseriessolar", "refreshtimeseries", "intra.modal", "inter.modal", "refreshintervalload", "refreshintervalhydro", "refreshintervalwind", - "refreshintervalthermal", "refreshintervalsolar", "readonly", "geographic.trimming") + "refreshintervalthermal", "refreshintervalsolar", "readonly", "geographic.trimming", "thematic.trimming") 
antares_params[[arg]] } - - - diff --git a/R/updateOptimizationSettings.R b/R/updateOptimizationSettings.R index e4d87d10..3f3ef7fa 100644 --- a/R/updateOptimizationSettings.R +++ b/R/updateOptimizationSettings.R @@ -17,6 +17,7 @@ #' @param include.spinningreserve true or false #' @param include.primaryreserve true or false #' @param include.exportmps true or false (since v8.3.2 can take also : none, optim-1, optim-2, both-optims) +#' @param solver.log true or false (available for version >= 8.8) #' @param power.fluctuations free modulations, minimize excursions or minimize ramping #' @param shedding.strategy share margins #' @param shedding.policy shave peaks or minimize duration @@ -53,6 +54,7 @@ updateOptimizationSettings <- function(simplex.range = NULL, include.spinningreserve = NULL, include.primaryreserve = NULL, include.exportmps = NULL, + solver.log = NULL, power.fluctuations = NULL, shedding.strategy = NULL, shedding.policy = NULL, @@ -104,7 +106,12 @@ updateOptimizationSettings <- function(simplex.range = NULL, assertthat::assert_that(include.exportmps %in% c("true", "false")) } } - + if (!is.null(solver.log)){ + if (opts$antaresVersion < 880){ + stop("updateOptimizationSettings: solver.log parameter is only available if using Antares >= 8.8.0", call. = FALSE) + } + assertthat::assert_that(solver.log %in% c("true", "false")) + } if (!is.null(power.fluctuations)) assertthat::assert_that( @@ -138,7 +145,8 @@ updateOptimizationSettings <- function(simplex.range = NULL, include.strategicreserve = include.strategicreserve, include.spinningreserve = include.spinningreserve, include.primaryreserve = include.primaryreserve, - include.exportmps = include.exportmps + include.exportmps = include.exportmps, + solver.log = solver.log )) for (i in seq_along(new_params_optimization)) { new_params_optimization[[i]] <- as.character(new_params_optimization[[i]]) @@ -252,7 +260,8 @@ dicoOptimizationSettings <- function(arg) { "unit-commitment-mode", "number-of-cores-mode", "renewable-generation-modelling", - "day-ahead-reserve-management" + "day-ahead-reserve-management", + "solver-log" ) ) @@ -275,7 +284,8 @@ dicoOptimizationSettings <- function(arg) { "unit.commitment.mode", "number.of.cores.mode", "renewable.generation.modelling", - "day.ahead.reserve.management" + "day.ahead.reserve.management", + "solver.log" ) antares_params[[arg]] diff --git a/R/updateOutputSettings.R b/R/updateOutputSettings.R index 98538edf..96ae6a4e 100644 --- a/R/updateOutputSettings.R +++ b/R/updateOutputSettings.R @@ -8,16 +8,20 @@ #' #' @param synthesis Logical. If TRUE, synthetic results will be stored in a #' directory Study_name/OUTPUT/simu_tag/Economy/mc-all. If FALSE, No general -#' synthesis will be printed out. -#' @param storenewset Logical. See Antares General Reference Guide. -#' @param archives Character vector. Series to archive. -#' @param result.format Character. Output format (txt-files or zip). +#' synthesis will be printed out. See Antares General Reference Guide (see link below). +#' @param storenewset Logical. See Antares General Reference Guide (see link below). +#' @param archives Character vector. Series to archive. See Antares General Reference Guide (see link below). +#' @param result.format Character. Output format (txt-files or zip). See Antares General Reference Guide (see link below). 
#' #' @template opts #' #' @export #' #' @importFrom assertthat assert_that +#' @importFrom utils modifyList +#' @importFrom antaresRead readIniFile +#' +#' @seealso \href{https://antares-simulator.readthedocs.io/en/latest/user-guide/solver/04-parameters/}{Antares General Reference Guide} #' #' @examples #' \dontrun{ @@ -39,43 +43,62 @@ updateOutputSettings <- function(synthesis = NULL, assertthat::assert_that(inherits(opts, "simOptions")) + new_params <- list( + synthesis = synthesis, + storenewset = storenewset, + archives = archives, + result.format = result.format + ) + + new_params <- dropNulls(x = new_params) + + new_params <- lapply(X = new_params, FUN = .format_ini_rhs) + names(new_params) <- sapply(names(new_params), dicoOutputSettings, USE.NAMES = FALSE) + # API block if (is_api_study(opts)) { - writeIni( - listData = list( - synthesis = synthesis, - storenewset = storenewset, - archives = paste(archives, collapse = ", "), - `result-format` = result.format - ), - pathIni = "settings/generaldata/output", - opts = opts - ) + writeIni(listData = new_params, pathIni = "settings/generaldata/output", opts = opts) return(update_api_opts(opts)) } - pathIni <- file.path(opts$studyPath, "settings", "generaldata.ini") - general <- readIniFile(file = pathIni) + generaldatapath <- file.path(opts[["studyPath"]], "settings", "generaldata.ini") + generaldata <- readIniFile(file = generaldatapath) - outputs <- general$output - if (!is.null(synthesis)) - outputs$synthesis <- synthesis - if (!is.null(storenewset)) - outputs$storenewset <- storenewset - if (!is.null(archives)) - outputs$archives <- paste(archives, collapse = ", ") - if (!is.null(result.format)) - outputs$`result-format` <- result.format - general$output <- outputs + l_output <- generaldata[["output"]] + l_output <- modifyList(x = l_output, val = new_params) + generaldata[["output"]] <- l_output + + writeIni(listData = generaldata, pathIni = generaldatapath, overwrite = TRUE, opts = opts) - writeIni(listData = general, pathIni = pathIni, overwrite = TRUE) - - # Maj simulation suppressWarnings({ - res <- antaresRead::setSimulationPath(path = opts$studyPath, simulation = "input") + res <- setSimulationPath(path = opts[["studyPath"]], simulation = "input") }) invisible(res) } + + +#' Correspondence between arguments of \code{updateOutputSettings} and actual Antares parameters. +#' +#' @param arg An argument from function \code{updateOutputSettings}. +#' +#' @return The corresponding Antares general parameter. 
+#' +#' @export +#' +#' @examples +#' dicoOutputSettings("result.format") # "result-format" +dicoOutputSettings <- function(arg) { + + if (length(arg) > 1) { + stop("'arg' must be length one") + } + + antares_params <- as.list(c("synthesis", "storenewset", "archives", "result-format")) + + names(antares_params) <- c("synthesis", "storenewset", "archives", "result.format") + + return(antares_params[[arg]]) +} diff --git a/R/utils.R b/R/utils.R index 3128a567..060cacc3 100644 --- a/R/utils.R +++ b/R/utils.R @@ -159,18 +159,32 @@ generate_cluster_name <- function(area, cluster_name, add_prefix) { } -#' @importFrom antaresRead readClusterSTDesc -check_cluster_name <- function(area, cluster_name, add_prefix, opts = antaresRead::simOptions()) { +#' @importFrom antaresRead readClusterSTDesc simOptions +check_cluster_name <- function(area_name, cluster_name, add_prefix, opts = simOptions()) { exists <- FALSE clusters <- readClusterSTDesc(opts = opts) if (nrow(clusters) > 0) { - cluster_name <- generate_cluster_name(area, cluster_name, add_prefix) - clusters_filtered <- clusters[clusters$area == tolower(area) & clusters$cluster == cluster_name,] + cluster_name <- generate_cluster_name(area = area_name, cluster_name = cluster_name, add_prefix = add_prefix) + clusters_filtered <- clusters[clusters$area == tolower(area_name) & clusters$cluster == cluster_name,] exists <- nrow(clusters_filtered) > 0 } return(exists) } + +#' @title Format a value to a suitable format to rhs in an .ini file. +#' +#' @param value The value to format. +#' +#' @return the formatted value +.format_ini_rhs <- function(value){ + # Convert logical to a lower case character to match the default existing file + if (inherits(x = value, what = "logical")) { + value <- tolower(value) + } + + return(paste(as.character(value), collapse = ", ")) +} diff --git a/_pkgdown.yml b/_pkgdown.yml index 8814734d..efeb2614 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -1,3 +1,5 @@ +development: + mode: auto destination: docs template: params: diff --git a/man/antaresRead-reexports.Rd b/man/antaresRead-reexports.Rd deleted file mode 100644 index c45412cc..00000000 --- a/man/antaresRead-reexports.Rd +++ /dev/null @@ -1,19 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/antaresRead-reexports.R -\name{antaresRead-reexports} -\alias{antaresRead-reexports} -\alias{readIni} -\alias{readIniFile} -\alias{readIniAPI} -\title{Re-exports from antaresRead} -\usage{ -readIni(pathIni, opts = antaresRead::simOptions(), default_ext = ".ini") - -readIniFile(file, stringsAsFactors = FALSE) - -readIniAPI(study_id, path, host, token = NULL) -} -\description{ -Re-exports from antaresRead -} -\keyword{internal} diff --git a/man/create-study.Rd b/man/create-study.Rd index 2bfa540c..a274b962 100644 --- a/man/create-study.Rd +++ b/man/create-study.Rd @@ -31,7 +31,7 @@ if it doesn't exist, it'll be created.} \item{...}{Other query parameters passed to POST request.} } \value{ -Result of \code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}} or \code{\link[=setSimulationPathAPI]{setSimulationPathAPI()}} accordingly. +Result of \code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}} or \code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPathAPI()}} accordingly. } \description{ Create study on disk or with AntaREST server through the API. 
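# Illustrative sketch: how the reworked updateOutputSettings() is intended to
# compose the helpers introduced above -- dropNulls(), .format_ini_rhs() and
# dicoOutputSettings() -- to build the [output] section of settings/generaldata.ini.
# The argument values below are hypothetical; only the helper calls mirror the patch.
new_params <- list(
  synthesis = TRUE,
  storenewset = NULL,            # NULL entries are dropped, i.e. left untouched in the file
  archives = c("load", "wind"),
  result.format = "zip"
)
new_params <- dropNulls(x = new_params)
# logicals are lower-cased and vectors collapsed: TRUE -> "true", c("load", "wind") -> "load, wind"
new_params <- lapply(X = new_params, FUN = .format_ini_rhs)
# R argument names are mapped to the Antares keys: "result.format" -> "result-format"
names(new_params) <- sapply(names(new_params), dicoOutputSettings, USE.NAMES = FALSE)
# new_params is now: list(synthesis = "true", archives = "load, wind", `result-format` = "zip")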
diff --git a/man/createBindingConstraint.Rd b/man/createBindingConstraint.Rd index 3566141f..7114c8f3 100644 --- a/man/createBindingConstraint.Rd +++ b/man/createBindingConstraint.Rd @@ -2,7 +2,6 @@ % Please edit documentation in R/createBindingConstraint.R \name{createBindingConstraint} \alias{createBindingConstraint} -\alias{createBindingConstraintBulk} \title{Create a binding constraint} \usage{ createBindingConstraint( @@ -19,8 +18,6 @@ createBindingConstraint( overwrite = FALSE, opts = antaresRead::simOptions() ) - -createBindingConstraintBulk(constraints, opts = antaresRead::simOptions()) } \arguments{ \item{name}{The name for the binding constraint.} @@ -51,9 +48,6 @@ weight or weight with offset.} \item{opts}{List of simulation parameters returned by the function \code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}}} - -\item{constraints}{A \code{list} of several named \code{list} containing data to create binding constraints. -\strong{Warning} all arguments for creating a binding constraints must be provided, see examples.} } \value{ An updated list containing various information about the simulation. @@ -63,7 +57,6 @@ An updated list containing various information about the simulation. \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#experimental}{\figure{lifecycle-experimental.svg}{options: alt='[Experimental]'}}}{\strong{[Experimental]}} Create a new binding constraint in an Antares study. -\code{createBindingConstraintBulk()} allow to create multiple constraints at once. } \details{ According to Antares version, usage may vary : @@ -102,28 +95,6 @@ createBindingConstraint( "area1\%area3" = "2\%3") ) -# Create multiple constraints - -# Prepare data for constraints -bindings_constraints <- lapply( - X = seq_len(100), - FUN = function(i) { - # use arguments of createBindingConstraint() - # all arguments must be provided ! - list( - name = paste0("constraints", i), - id = paste0("constraints", i), - values = matrix(data = rep(0, 8760 * 3), ncol = 3), - enabled = FALSE, - timeStep = "hourly", - operator = "both", - coefficients = list("area1\%area2" = 1), - overwrite = TRUE - ) - } -) -# create all constraints -createBindingConstraintBulk(bindings_constraints) # >= v8.7.0 : @@ -150,31 +121,11 @@ createBindingConstraint(name = "bc_example", values = values_data, overwrite = TRUE) -# create multiple constraints -bindings_constraints <- lapply( - X = seq_len(10), - FUN = function(i) { - # use arguments of createBindingConstraint() - # all arguments must be provided ! 
- list( - name = paste0("constraints_bulk", i), - id = paste0("constraints_bulk", i), - values = values_data, - enabled = FALSE, - timeStep = "hourly", - operator = "both", - coefficients = list("at\%fr" = 1), - group= "group_bulk", - overwrite = TRUE - ) - } -) - -createBindingConstraintBulk(bindings_constraints) } } \seealso{ Other binding constraints functions: +\code{\link{createBindingConstraintBulk}()}, \code{\link{editBindingConstraint}()}, \code{\link{removeBindingConstraint}()} } diff --git a/man/createBindingConstraintBulk.Rd b/man/createBindingConstraintBulk.Rd new file mode 100644 index 00000000..223c0c9b --- /dev/null +++ b/man/createBindingConstraintBulk.Rd @@ -0,0 +1,98 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/createBindingConstraint.R +\name{createBindingConstraintBulk} +\alias{createBindingConstraintBulk} +\title{Create multiple binding constraint at once.} +\usage{ +createBindingConstraintBulk(constraints, opts = antaresRead::simOptions()) +} +\arguments{ +\item{constraints}{A \code{list} of several named \code{list} containing data to create binding constraints. +\strong{Warning} all arguments for creating a binding constraints must be provided, see examples.} + +\item{opts}{List of simulation parameters returned by the function +\code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}}} +} +\value{ +An updated list containing various information about the simulation. +} +\description{ +\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#experimental}{\figure{lifecycle-experimental.svg}{options: alt='[Experimental]'}}}{\strong{[Experimental]}} +\ifelse{html}{\figure{badge_api_no.svg}{options: alt='Antares API NO'}}{Antares API: \strong{NO}} +} +\details{ +According to Antares version, usage may vary : + +\strong{>= v8.7.0} : +\itemize{ +\item For each constraint name, one file .txt containing \verb{_lt.txt, _gt.txt, _eq.txt}. +\item Parameter \code{values} must be named \code{list} ("lt", "gt", "eq") containing \code{data.frame} scenarized. +\item Add parameter \code{group} in input list \code{constraints} +} + +see example section below. +} +\examples{ +\dontrun{ +# For Study version < v8.7.0 +# Create multiple constraints + +# Prepare data for constraints +bindings_constraints <- lapply( + X = seq_len(100), + FUN = function(i) { + # use arguments of createBindingConstraint() + # all arguments must be provided ! + list( + name = paste0("constraints", i), + id = paste0("constraints", i), + values = matrix(data = rep(0, 8760 * 3), ncol = 3), + enabled = FALSE, + timeStep = "hourly", + operator = "both", + coefficients = list("area1\%area2" = 1), + overwrite = TRUE + ) + } +) +# create all constraints +createBindingConstraintBulk(bindings_constraints) + +# For Study version >= v8.7.0 (add parameter `group`) + +# data values (hourly) +df <- matrix(data = rep(0, 8760 * 3), ncol = 3) +values_data <- list(lt=df, + gt= df) + +# create multiple constraints +bindings_constraints <- lapply( + X = seq_len(10), + FUN = function(i) { + # use arguments of createBindingConstraint() + # all arguments must be provided ! 
+ list( + name = paste0("constraints_bulk", i), + id = paste0("constraints_bulk", i), + values = values_data, + enabled = FALSE, + timeStep = "hourly", + operator = "both", + coefficients = list("at\%fr" = 1), + group= "group_bulk", + overwrite = TRUE + ) + } +) + +createBindingConstraintBulk(bindings_constraints) +} + +} +\seealso{ +Other binding constraints functions: +\code{\link{createBindingConstraint}()}, +\code{\link{editBindingConstraint}()}, +\code{\link{removeBindingConstraint}()} +} +\concept{binding constraints functions} diff --git a/man/createClusterST.Rd b/man/createClusterST.Rd index b50ea632..8c23eb91 100644 --- a/man/createClusterST.Rd +++ b/man/createClusterST.Rd @@ -24,19 +24,21 @@ createClusterST( \item{cluster_name}{Name for the cluster, it will prefixed by area name, unless you set \code{add_prefix = FALSE}.} -\item{group}{Group of the cluster, one of : "PSP_open", "PSP_closed", "Pondage", "Battery", "Other". It corresponds to the type of stockage.} +\item{group}{Group of the cluster, one of : "PSP_open", "PSP_closed", "Pondage", "Battery", "Other". +It corresponds to the type of stockage.} \item{storage_parameters}{\code{list } Parameters to write in the Ini file (see \code{Note}).} -\item{PMAX_injection}{modulation of charging capacity on an 8760-hour basis. The values are float between 0 and 1.} +\item{PMAX_injection}{Modulation of charging capacity on an 8760-hour basis. \code{numeric} \{0;1\} (8760*1).} -\item{PMAX_withdrawal}{modulation of discharging capacity on an 8760-hour basis. The values are float between 0 and 1.} +\item{PMAX_withdrawal}{Modulation of discharging capacity on an 8760-hour basis. \code{numeric} \{0;1\} (8760*1).} -\item{inflows}{imposed withdrawals from the stock for other uses, The values are integer.} +\item{inflows}{Algebraic deviation of the state of charge of the storage, which does not induce any power +generation or consumption on the system \code{numeric} \{<0;>0\} (8760*1).} -\item{lower_rule_curve}{This is the lower limit for filling the stock imposed each hour. The values are float between 0 and 1.} +\item{lower_rule_curve}{This is the lower limit for filling the stock imposed each hour. \code{numeric} \{0;1\} (8760*1).} -\item{upper_rule_curve}{This is the upper limit for filling the stock imposed each hour. The values are float between 0 and 1.} +\item{upper_rule_curve}{This is the upper limit for filling the stock imposed each hour. \code{numeric} \{0;1\} (8760*1).} \item{add_prefix}{If \code{TRUE} (the default), \code{cluster_name} will be prefixed by area name.} @@ -55,17 +57,29 @@ Create a new ST-storage cluster for >= v8.6.0 Antares studies. } \note{ To write parameters to the \code{list.ini} file. You have function \code{storage_values_default()} who is called by default. -This function return \code{list} containing six parameters for cluster \code{st-storage}. -See example section. +This function return \code{list} containing properties according study version for cluster \code{st-storage}. 
-To write data (.txt file), you have parameter for each output file : +Study version >= "8.6.0" : \itemize{ -\item PMAX-injection.txt -\item PMAX-withdrawal.txt -\item inflows.txt -\item lower-rule-curve.txt -\item upper-rule-curve.txt +\item efficiency = 1 (\code{numeric} \{0;1\}) +\item reservoircapacity = 0 (\code{integer} >= 0) +\item initiallevel = 0 (\code{numeric} \{0;1\}) +\item withdrawalnominalcapacity = 0 (\code{integer} >= 0) +\item injectionnominalcapacity = 0 (\code{integer} >= 0) +\item initialleveloptim = FALSE (\code{logical} TRUE/FALSE) } + +Study version >= "8.8.0" (update + new parameter) : +\itemize{ +\item initiallevel = 0.5 (\code{numeric} \{0;1\}) +\item enabled = TRUE (\code{logical} TRUE/FALSE) +} + +⚠⚠⚠ + +By default, these values don't allow you to have an active cluster (See example section.) + +⚠⚠⚠ } \examples{ \dontrun{ @@ -84,13 +98,19 @@ levels(readClusterSTDesc()$cluster) # > "my_area_my_cluster" # create cluster with custom parameter and data + # use the function to create your own list of parameters (no Antares optim) + # if you want optim (my_parameters$initialleveloptim <- TRUE) my_parameters <- storage_values_default() my_parameters$efficiency <- 0.5 +my_parameters$initiallevel <- 10 +my_parameters$withdrawalnominalcapacity <- 100 +my_parameters$injectionnominalcapacity <- 1000 my_parameters$reservoircapacity <- 10000 - + # time series inflow_data <- matrix(3, 8760) ratio_data <- matrix(0.7, 8760) + createClusterST(area = "my_area", "my_cluster", storage_parameters = my_parameters, @@ -103,6 +123,6 @@ createClusterST(area = "my_area", } \seealso{ -\code{\link[=editClusterST]{editClusterST()}} to edit existing clusters, \code{\link[=readClusterSTDesc]{readClusterSTDesc()}} to read cluster, -\code{\link[=removeClusterST]{removeClusterST()}} to remove clusters. +All the functions needed to manage a storage cluster, +\code{\link[antaresRead:readClusterDesc]{antaresRead::readClusterSTDesc()}}, \code{\link[=editClusterST]{editClusterST()}}, \code{\link[=removeClusterST]{removeClusterST()}}. } diff --git a/man/dicoOutputSettings.Rd b/man/dicoOutputSettings.Rd new file mode 100644 index 00000000..99da9422 --- /dev/null +++ b/man/dicoOutputSettings.Rd @@ -0,0 +1,20 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/updateOutputSettings.R +\name{dicoOutputSettings} +\alias{dicoOutputSettings} +\title{Correspondence between arguments of \code{updateOutputSettings} and actual Antares parameters.} +\usage{ +dicoOutputSettings(arg) +} +\arguments{ +\item{arg}{An argument from function \code{updateOutputSettings}.} +} +\value{ +The corresponding Antares general parameter. +} +\description{ +Correspondence between arguments of \code{updateOutputSettings} and actual Antares parameters. +} +\examples{ +dicoOutputSettings("result.format") # "result-format" +} diff --git a/man/dot-format_ini_rhs.Rd b/man/dot-format_ini_rhs.Rd new file mode 100644 index 00000000..ec7424eb --- /dev/null +++ b/man/dot-format_ini_rhs.Rd @@ -0,0 +1,17 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/utils.R +\name{.format_ini_rhs} +\alias{.format_ini_rhs} +\title{Format a value to a suitable format to rhs in an .ini file.} +\usage{ +.format_ini_rhs(value) +} +\arguments{ +\item{value}{The value to format.} +} +\value{ +the formatted value +} +\description{ +Format a value to a suitable format to rhs in an .ini file.
+} diff --git a/man/editBindingConstraint.Rd b/man/editBindingConstraint.Rd index d2536510..0ceb8c74 100644 --- a/man/editBindingConstraint.Rd +++ b/man/editBindingConstraint.Rd @@ -115,6 +115,7 @@ editBindingConstraint(name = "myconstraint", } \seealso{ Other binding constraints functions: +\code{\link{createBindingConstraintBulk}()}, \code{\link{createBindingConstraint}()}, \code{\link{removeBindingConstraint}()} } diff --git a/man/group_values_check.Rd b/man/group_values_meta_check.Rd similarity index 79% rename from man/group_values_check.Rd rename to man/group_values_meta_check.Rd index 1e4e1899..9b6c0c5b 100644 --- a/man/group_values_check.Rd +++ b/man/group_values_meta_check.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/createBindingConstraint.R -\name{group_values_check} -\alias{group_values_check} +\name{group_values_meta_check} +\alias{group_values_meta_check} \title{Check dimension of time series for binding constraints} \usage{ -group_values_check( +group_values_meta_check( group_value, values_data, operator_check, @@ -31,5 +31,7 @@ An updated list containing various information about the simulation. } \description{ Only needed for study version >= 870 + +Dimension of groups are compared with meta parameter \code{binding} returned by \code{\link[antaresRead:simOptions]{antaresRead::simOptions()}} } \keyword{internal} diff --git a/man/importZipStudyWeb.Rd b/man/importZipStudyWeb.Rd index 8bdb7eec..2ceb834e 100644 --- a/man/importZipStudyWeb.Rd +++ b/man/importZipStudyWeb.Rd @@ -4,7 +4,14 @@ \alias{importZipStudyWeb} \title{Import a local study to Antares Web} \usage{ -importZipStudyWeb(host, token, zipfile_name, opts = antaresRead::simOptions()) +importZipStudyWeb( + host, + token, + zipfile_name, + delete_zipfile = TRUE, + folder_destination = NULL, + opts = antaresRead::simOptions() +) } \arguments{ \item{host}{Host of AntaREST server API.} @@ -13,6 +20,10 @@ importZipStudyWeb(host, token, zipfile_name, opts = antaresRead::simOptions()) \item{zipfile_name}{Name of the zipfile of the study.} +\item{delete_zipfile}{Should the zipfile be deleted after upload.} + +\item{folder_destination}{Folder of the study in Antares Web.} + \item{opts}{List of simulation parameters returned by the function \code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}}} } diff --git a/man/removeBindingConstraint.Rd b/man/removeBindingConstraint.Rd index 5caf004d..0754a89c 100644 --- a/man/removeBindingConstraint.Rd +++ b/man/removeBindingConstraint.Rd @@ -51,6 +51,7 @@ removeBindingConstraint(group = group_to_delete) } \seealso{ Other binding constraints functions: +\code{\link{createBindingConstraintBulk}()}, \code{\link{createBindingConstraint}()}, \code{\link{editBindingConstraint}()} } diff --git a/man/storage_values_default.Rd b/man/storage_values_default.Rd index b389b9e6..ba483658 100644 --- a/man/storage_values_default.Rd +++ b/man/storage_values_default.Rd @@ -4,14 +4,22 @@ \alias{storage_values_default} \title{Short Term Storage Property List} \usage{ -storage_values_default() +storage_values_default(opts = simOptions()) +} +\arguments{ +\item{opts}{List of simulation parameters returned by the function +\code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}}} } \value{ +An updated list containing various information about the simulation. 
+ a named list } \description{ -Short Term Storage Property List +Default values are returned according to study version } \examples{ +\dontrun{ storage_values_default() } +} diff --git a/man/updateGeneralSettings.Rd b/man/updateGeneralSettings.Rd index d62ed913..698cdc00 100644 --- a/man/updateGeneralSettings.Rd +++ b/man/updateGeneralSettings.Rd @@ -16,7 +16,8 @@ updateGeneralSettings( leapyear = NULL, year.by.year = NULL, derated = NULL, - custom.ts.numbers = NULL, + custom.scenario = NULL, + custom.ts.numbers = deprecated(), user.playlist = NULL, filtering = NULL, active.rules.scenario = NULL, @@ -36,6 +37,7 @@ updateGeneralSettings( refreshintervalsolar = NULL, readonly = NULL, geographic.trimming = NULL, + thematic.trimming = NULL, opts = antaresRead::simOptions() ) } @@ -65,46 +67,50 @@ Study_name/OUTPUT/simu_tag/Economy /mc-i-number} \item{derated}{See Antares General Reference Guide.} -\item{custom.ts.numbers}{See Antares General Reference Guide.} +\item{custom.scenario}{See Antares General Reference Guide (see link below). Replace custom.ts.numbers.} -\item{user.playlist}{See Antares General Reference Guide.} +\item{custom.ts.numbers}{See Antares General Reference Guide (see link below). Replaced by custom.scenario.} -\item{filtering}{See Antares General Reference Guide.} +\item{user.playlist}{See Antares General Reference Guide (see link below).} -\item{active.rules.scenario}{See Antares General Reference Guide.} +\item{filtering}{See Antares General Reference Guide (see link below).} -\item{generate}{See Antares General Reference Guide.} +\item{active.rules.scenario}{See Antares General Reference Guide (see link below).} -\item{nbtimeseriesload}{See Antares General Reference Guide.} +\item{generate}{See Antares General Reference Guide (see link below).} -\item{nbtimeserieshydro}{See Antares General Reference Guide.} +\item{nbtimeseriesload}{See Antares General Reference Guide (see link below).} -\item{nbtimeserieswind}{See Antares General Reference Guide.} +\item{nbtimeserieshydro}{See Antares General Reference Guide (see link below).} -\item{nbtimeseriesthermal}{See Antares General Reference Guide.} +\item{nbtimeserieswind}{See Antares General Reference Guide (see link below).} -\item{nbtimeseriessolar}{See Antares General Reference Guide.} +\item{nbtimeseriesthermal}{See Antares General Reference Guide (see link below).} -\item{refreshtimeseries}{See Antares General Reference Guide.} +\item{nbtimeseriessolar}{See Antares General Reference Guide (see link below).} -\item{intra.modal}{See Antares General Reference Guide.} +\item{refreshtimeseries}{See Antares General Reference Guide (see link below).} -\item{inter.modal}{See Antares General Reference Guide.} +\item{intra.modal}{See Antares General Reference Guide (see link below).} -\item{refreshintervalload}{See Antares General Reference Guide.} +\item{inter.modal}{See Antares General Reference Guide (see link below).} -\item{refreshintervalhydro}{See Antares General Reference Guide.} +\item{refreshintervalload}{See Antares General Reference Guide (see link below).} -\item{refreshintervalwind}{See Antares General Reference Guide.} +\item{refreshintervalhydro}{See Antares General Reference Guide (see link below).} -\item{refreshintervalthermal}{See Antares General Reference Guide.} +\item{refreshintervalwind}{See Antares General Reference Guide (see link below).} -\item{refreshintervalsolar}{See Antares General Reference Guide.} +\item{refreshintervalthermal}{See Antares General Reference Guide (see link below).} 
-\item{readonly}{See Antares General Reference Guide.} +\item{refreshintervalsolar}{See Antares General Reference Guide (see link below).} + +\item{readonly}{See Antares General Reference Guide (see link below).} \item{geographic.trimming}{\code{logical} indicates whether to store the results for all time spans (FALSE) or for custom time spans (TRUE)} +\item{thematic.trimming}{See Antares General Reference Guide (see link below).} + \item{opts}{List of simulation parameters returned by the function \code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}}} } @@ -128,3 +134,6 @@ updateGeneralSettings(generate = c("thermal", "hydro")) } } +\seealso{ +\href{https://antares-simulator.readthedocs.io/en/latest/user-guide/solver/04-parameters/}{Antares General Reference Guide} +} diff --git a/man/updateOptimizationSettings.Rd b/man/updateOptimizationSettings.Rd index 7f7ca076..34270428 100644 --- a/man/updateOptimizationSettings.Rd +++ b/man/updateOptimizationSettings.Rd @@ -16,6 +16,7 @@ updateOptimizationSettings( include.spinningreserve = NULL, include.primaryreserve = NULL, include.exportmps = NULL, + solver.log = NULL, power.fluctuations = NULL, shedding.strategy = NULL, shedding.policy = NULL, @@ -50,6 +51,8 @@ null-for-all-links, infinite-for-all-links, null-for-physical-links, infinite-fo \item{include.exportmps}{true or false (since v8.3.2 can take also : none, optim-1, optim-2, both-optims)} +\item{solver.log}{true or false (available for version >= 8.8)} + \item{power.fluctuations}{free modulations, minimize excursions or minimize ramping} \item{shedding.strategy}{share margins} diff --git a/man/updateOutputSettings.Rd b/man/updateOutputSettings.Rd index 06d561a7..61b58ff5 100644 --- a/man/updateOutputSettings.Rd +++ b/man/updateOutputSettings.Rd @@ -15,13 +15,13 @@ updateOutputSettings( \arguments{ \item{synthesis}{Logical. If TRUE, synthetic results will be stored in a directory Study_name/OUTPUT/simu_tag/Economy/mc-all. If FALSE, No general -synthesis will be printed out.} +synthesis will be printed out. See Antares General Reference Guide (see link below).} -\item{storenewset}{Logical. See Antares General Reference Guide.} +\item{storenewset}{Logical. See Antares General Reference Guide (see link below).} -\item{archives}{Character vector. Series to archive.} +\item{archives}{Character vector. Series to archive. See Antares General Reference Guide (see link below).} -\item{result.format}{Character. Output format (txt-files or zip).} +\item{result.format}{Character. Output format (txt-files or zip). 
See Antares General Reference Guide (see link below).} \item{opts}{List of simulation parameters returned by the function \code{\link[antaresRead:setSimulationPath]{antaresRead::setSimulationPath()}}} @@ -47,3 +47,6 @@ updateOutputSettings( } } +\seealso{ +\href{https://antares-simulator.readthedocs.io/en/latest/user-guide/solver/04-parameters/}{Antares General Reference Guide} +} diff --git a/tests/testthat/test-createArea.R b/tests/testthat/test-createArea.R index baf2dd6d..d6f1f3af 100644 --- a/tests/testthat/test-createArea.R +++ b/tests/testthat/test-createArea.R @@ -358,12 +358,12 @@ test_that("removeArea(): check that area is removed if it is not referenced in a # Area opts <- createArea(name = new_area, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) + expect_no_warning(removeArea(name = new_area, opts = simOptions())) # Area + Link opts <- createArea(name = new_area, opts = simOptions()) opts <- createLink(from = "zone1", to = new_area, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) + expect_no_warning(removeArea(name = new_area, opts = simOptions())) # Area + Link + Binding Constraint opts <- createArea(name = new_area, opts = simOptions()) @@ -377,23 +377,20 @@ test_that("removeArea(): check that area is removed if it is not referenced in a coefficients = coefs, values = matrix(rep(0, nb_values_per_matrix), ncol = nb_cols_per_matrix), opts = simOptions()) - expect_error(removeArea(name = new_area, opts = simOptions()), - regexp = paste0("Can not remove the area ", new_area) + expect_warning(removeArea(name = new_area, opts = simOptions()), + regexp = "The following binding constraints have the area to remove in a coefficient : " ) - removeBindingConstraint(name = name_bc, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) - new_area <- "zzone_bc_cluster" # Area opts <- createArea(name = new_area, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) + expect_no_warning(removeArea(name = new_area, opts = simOptions())) # Area + Cluster opts <- createArea(name = new_area, opts = simOptions()) opts <- createCluster(area = new_area, cluster_name = "nuclear", add_prefix = TRUE, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) + expect_no_warning(removeArea(name = new_area, opts = simOptions())) # Area + Cluster + Binding Constraint opts <- createArea(name = new_area, opts = simOptions()) @@ -408,12 +405,11 @@ test_that("removeArea(): check that area is removed if it is not referenced in a coefficients = coefs, values = matrix(rep(0, nb_values_per_matrix), ncol = nb_cols_per_matrix), opts = simOptions()) - expect_error(removeArea(name = new_area, opts = simOptions()), - regexp = paste0("Can not remove the area ", new_area) + expect_warning(removeArea(name = new_area, opts = simOptions()), + regexp = "The following binding constraints have the area to remove in a coefficient : " ) removeBindingConstraint(name = name_bc, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) new_area <- "zzone_bc_cluster_link" @@ -431,12 +427,11 @@ test_that("removeArea(): check that area is removed if it is not referenced in a coefficients = coefs, values = matrix(rep(0, nb_values_per_matrix), ncol = nb_cols_per_matrix), opts = simOptions()) - expect_error(removeArea(name = new_area, opts = simOptions()), - regexp = paste0("Can not remove the area ", new_area) + 
expect_warning(removeArea(name = new_area, opts = simOptions()), + regexp = "The following binding constraints have the area to remove in a coefficient : " ) removeBindingConstraint(name = name_bc, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) new_area <- "zzone_bc_cluster_link_2" @@ -454,17 +449,14 @@ test_that("removeArea(): check that area is removed if it is not referenced in a coefficients = coefs, values = matrix(rep(0, nb_values_per_matrix), ncol = nb_cols_per_matrix), opts = simOptions()) - expect_error(removeArea(name = new_area, opts = simOptions()), - regexp = paste0("Can not remove the area ", new_area) + expect_warning(removeArea(name = new_area, opts = simOptions()), + regexp = "The following binding constraints have the area to remove in a coefficient : " ) - removeBindingConstraint(name = name_bc, opts = simOptions()) - expect_no_error(removeArea(name = new_area, opts = simOptions())) - # standard areas for (area in my_areas) { - expect_error(removeArea(name = area, opts = simOptions()), - regexp = paste0("Can not remove the area ", area) + expect_warning(removeArea(name = area, opts = simOptions()), + regexp = "The following binding constraints have the area to remove in a coefficient : " ) } diff --git a/tests/testthat/test-createBindingConstraint.R b/tests/testthat/test-createBindingConstraint.R index 223d2837..f4509eab 100644 --- a/tests/testthat/test-createBindingConstraint.R +++ b/tests/testthat/test-createBindingConstraint.R @@ -226,6 +226,36 @@ sapply(studies, function(study) { expect_warning(removeBindingConstraint(name = "myimaginaryconstraint")) }) + ## bulk ---- + test_that("createBindingConstraintBulk v710", { + # Prepare data for constraints + bindings_constraints <- lapply( + X = seq_len(5), + FUN = function(i) { + # use arguments of createBindingConstraint() + # all arguments must be provided ! 
+ list( + name = paste0("constraints_bulk", i), + id = paste0("constraints_bulk", i), + values = matrix(data = rep(1, 8760 * 3), ncol = 3), + enabled = FALSE, + timeStep = "hourly", + operator = "both", + coefficients = list("a%b" = 1), + overwrite = TRUE + ) + } + ) + # create all constraints + createBindingConstraintBulk(bindings_constraints) + + # tests + testthat::expect_true("constraints_bulk1" %in% + names(readBindingConstraints())) + testthat::expect_true("constraints_bulk5" %in% + names(readBindingConstraints())) + }) + # remove temporary study unlink(x = file.path(pathstd, "test_case"), recursive = TRUE) @@ -324,7 +354,7 @@ test_that("createBindingConstraint (default group value) v8.7", { path_file_bc <- paste0(file.path(path_bc, "myconstraint"), operator_bc, ".txt") - # read .txt + # read .txt (test values) res <- lapply(path_file_bc, antaresRead:::fread_antares, opts = opts_test) @@ -352,6 +382,77 @@ test_that("createBindingConstraint (default group value) v8.7", { testthat::expect_equal(dim(scenar_values$lt)[2], dim(bc$myconstraint2$values$less)[2]) + # for both + operator_bc <- c("_lt", "_gt") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, "myconstraint2"), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + # txt files (test real value) + # test just first values cause code convert 8760 to 8784 with 0 + testthat::expect_equal(head(res[[1]]), + head(data.table::as.data.table(scenar_values$lt))) + testthat::expect_equal(head(res[[2]]), + head(data.table::as.data.table(scenar_values$gt))) + + # for greater + createBindingConstraint( + name = "myconstraint_gr8ter", + values = scenar_values, + enabled = FALSE, + timeStep = "hourly", + operator = "greater", + coefficients = c("at%fr" = 1)) + + bc <- readBindingConstraints() + + operator_bc <- c("_gt") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, "myconstraint_gr8ter"), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + # txt files (test real value) + # test just first values cause code convert 8760 to 8784 with 0 + testthat::expect_equal(head(res[[1]]), + head(data.table::as.data.table(scenar_values$gt))) + + # for equal + createBindingConstraint( + name = "myconstraint_equal", + values = scenar_values, + enabled = FALSE, + timeStep = "hourly", + operator = "equal", + coefficients = c("at%fr" = 1)) + + bc <- readBindingConstraints() + + operator_bc <- c("_eq") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, "myconstraint_equal"), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + # txt files (test real value) + # test just first values cause code convert 8760 to 8784 with 0 + testthat::expect_equal(head(res[[1]]), + head(data.table::as.data.table(scenar_values$eq))) + + ### error dim ---- # add BC with daily values (different columns dimension ERROR) testthat::expect_error( @@ -361,9 +462,8 @@ test_that("createBindingConstraint (default group value) v8.7", { enabled = FALSE, timeStep = "daily", operator = "both", - coefficients = c("at%fr" = 1), - opts = opts_test - ), regexp = "Put right columns dimension" + coefficients = c("at%fr" = 1)), + regexp = "Put right columns dimension" ) }) @@ 
-399,9 +499,7 @@ testthat::test_that("createBindingConstraint with new group v8.7",{ timeStep = "hourly", operator = "less", group = name_group, - coefficients = c("at%fr" = 1), - opts = opts_test - ) + coefficients = c("at%fr" = 1)) # ADD binding with multi cols df_multi_col <- scenar_values["lt"] @@ -571,12 +669,278 @@ test_that("createBindingConstraintBulk v8.7", { # tests testthat::expect_true("constraints_bulk1" %in% - names(readBindingConstraints(opts = opts_test))) + names(readBindingConstraints())) testthat::expect_true("constraints_bulk10" %in% - names(readBindingConstraints(opts = opts_test))) + names(readBindingConstraints())) + + + + test_that("test bad dimension object INPUT v8.7", { + bad_object <- list( + name = paste0("constraints_bulkBAD"), + id = paste0("constraints_bulkBAD"), + values = scenar_values_daily, + enabled = FALSE, + timeStep = "hourly", + operator = "both", + coefficients = list("at%fr" = 1), + group= "group_bulk", + overwrite = TRUE + ) + + bad_object <- append(list(bad_object), bindings_constraints) + + expect_error( + createBindingConstraintBulk(bad_object), + regexp = "Problem dimension with group" + ) + + }) + +}) + + + +test_that("test bad dimension object with existing object in study v8.7", { + bad_object <- list( + name = paste0("constraints_bulkBAD"), + id = paste0("constraints_bulkBAD"), + values = scenar_values_daily, + enabled = FALSE, + timeStep = "hourly", + operator = "both", + coefficients = list("at%fr" = 1), + group= "group_bulk", + overwrite = TRUE + ) + + expect_error( + createBindingConstraintBulk(list(bad_object)), + regexp = "Problem dimension with group" + ) + +}) + +test_that("test NULL VALUES in study v8.7", { + BC_NULL_VALUES <- list( + name = paste0("constraints_bulkNULL"), + id = paste0("constraints_bulkNULL"), + values = NULL, + enabled = FALSE, + timeStep = "hourly", + operator = "both", + coefficients = list("at%fr" = 1), + group= "group_bulk", + overwrite = TRUE + ) + + createBindingConstraintBulk(list(BC_NULL_VALUES)) + + # tests + testthat::expect_true("constraints_bulkNULL" %in% + names(readBindingConstraints())) + + # read real value + operator_bc <- c("_lt", "_gt") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, "constraints_bulkNULL"), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + res <- unlist(res) + + # txt files are empty + testthat::expect_equal(res, NULL) + +}) + +test_that("test mixed VALUES in study v8.7", { + BC_MIX_VALUES <- list( + list( + name = paste0("constraints_bulkNULL"), + id = paste0("constraints_bulkNULL"), + values = NULL, + enabled = FALSE, + timeStep = "hourly", + operator = "both", + coefficients = list("at%fr" = 1), + group= "group_bulk", + overwrite = TRUE + ), + list( + name = paste0("constraints_bulk_value"), + id = paste0("constraints_bulk_value"), + values = scenar_values, + enabled = FALSE, + timeStep = "hourly", + operator = "greater", + coefficients = list("at%fr" = 1), + group= "group_bulk", + overwrite = TRUE + )) + + createBindingConstraintBulk(BC_MIX_VALUES) + + # tests + testthat::expect_true(all( + c("constraints_bulkNULL", "constraints_bulk_value") %in% + names(readBindingConstraints()))) + + # read real value + # NULL + operator_bc <- c("_lt", "_gt") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, "constraints_bulkNULL"), + operator_bc, ".txt") + + # read .txt (test 
values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + res <- unlist(res) + + # txt files are empty + testthat::expect_equal(res, NULL) + + # VALUE + operator_bc <- c("_gt") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, "constraints_bulk_value"), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + # txt files + testthat::expect_equal(head(res[[1]]), + head(data.table::as.data.table(scenar_values$gt))) + + }) +test_that("Control of matrix dimension is not dependent of the order in the list of the values", { + + val_cstr1 <- list("lt" = matrix(data = rep(0, 8760 * 1), ncol = 1), + "gt" = matrix(data = rep(555, 8760 * 3), ncol = 3), + "eq" = matrix(data = rep(0, 8760 * 1), ncol = 1) + ) + val_cstr2 <- list("lt" = matrix(data = rep(0, 8760 * 1), ncol = 1), + "eq" = matrix(data = rep(0, 8760 * 1), ncol = 1), + "gt" = matrix(data = rep(777, 8760 * 5), ncol = 5) + ) + lst_cstr <- list( + list( + name = "cstr1", + id = "cstr1", + values = val_cstr1, + enabled = TRUE, + timeStep = "hourly", + operator = "greater", + coefficients = list("at%fr" = 1), + group= "group_bulk_123", + overwrite = TRUE + ), + list( + name = "cstr2", + id = "cstr2", + values = val_cstr2, + enabled = TRUE, + timeStep = "hourly", + operator = "greater", + coefficients = list("at%fr" = 1), + group= "group_bulk_123", + overwrite = TRUE + ) + ) + expect_error( + createBindingConstraintBulk(constraints = lst_cstr, opts = simOptions()), + regexp = "Problem dimension with group" + ) + + val_cstr1 <- list("lt" = matrix(data = rep(444, 8760 * 2), ncol = 2), + "gt" = matrix(data = rep(555, 8760 * 3), ncol = 3), + "eq" = matrix(data = rep(0, 8760 * 1), ncol = 1) + ) + val_cstr2 <- list("lt" = matrix(data = rep(0, 8760 * 1), ncol = 1), + "eq" = matrix(data = rep(0, 8760 * 1), ncol = 1), + "gt" = matrix(data = rep(777, 8760 * 5), ncol = 5) + ) + lst_cstr <- list( + list( + name = "cstr1", + id = "cstr1", + values = val_cstr1, + enabled = TRUE, + timeStep = "hourly", + operator = "both", + coefficients = list("at%fr" = 1), + group= "group_bulk_both", + overwrite = TRUE + ), + list( + name = "cstr2", + id = "cstr2", + values = val_cstr2, + enabled = TRUE, + timeStep = "hourly", + operator = "greater", + coefficients = list("at%fr" = 1), + group= "group_bulk_123", + overwrite = TRUE + ) + ) + expect_error( + createBindingConstraintBulk(constraints = lst_cstr, opts = simOptions()), + regexp = "Problem dimension with group" + ) + + val_cstr1 <- list("gt" = NULL, + "lt" = matrix(data = rep(555, 8760 * 3), ncol = 3), + "eq" = matrix(data = rep(0, 8760 * 1), ncol = 1) + ) + val_cstr2 <- list("lt" = matrix(data = rep(0, 8760 * 1), ncol = 1), + "eq" = matrix(data = rep(0, 8760 * 1), ncol = 1), + "gt" = matrix(data = rep(777, 8760 * 5), ncol = 5) + ) + lst_cstr <- list( + list( + name = "cstr1", + id = "cstr1", + values = val_cstr1, + enabled = TRUE, + timeStep = "hourly", + operator = "both", + coefficients = list("at%fr" = 1), + group= "group_bulk_123", + overwrite = TRUE + ), + list( + name = "cstr2", + id = "cstr2", + values = val_cstr2, + enabled = TRUE, + timeStep = "hourly", + operator = "greater", + coefficients = list("at%fr" = 1), + group= "group_bulk_123", + overwrite = TRUE + ) + ) + expect_error( + createBindingConstraintBulk(constraints = lst_cstr, opts = simOptions()), + regexp = "Problem dimension with group" + ) + +}) + # remove temporary study 
---- deleteStudy() diff --git a/tests/testthat/test-createCluster.R b/tests/testthat/test-createCluster.R index a315c56b..725b919d 100644 --- a/tests/testthat/test-createCluster.R +++ b/tests/testthat/test-createCluster.R @@ -106,21 +106,6 @@ test_that("Create cluster with pollutants params (new feature v8.6)",{ createArea(name = "test") - test_that("Create cluster default call (new feature v8.6)",{ - # default call now create without pollutants - createCluster(area = getAreas()[1], - cluster_name = "cluster_default", - overwrite = TRUE) - - res_cluster <- antaresRead::readClusterDesc() - - pollutants_names <- names(antaresEditObject::list_pollutants_values()) - - # check default values - testthat::expect_false(all( - pollutants_names %in% names(res_cluster))) - }) - test_that("Create cluster with bad parameter pollutant",{ bad_pollutants_param <- "not_a_list" @@ -226,9 +211,8 @@ test_that("removeCluster() : cluster is not removed if it is referenced in a bin suppressWarnings(opts <- setSimulationPath(path = opts$studyPath, simulation = "input")) - expect_error(removeCluster(area = "zone1", cluster_name = "nuclear", add_prefix = TRUE, opts = opts), regexp = "Can not remove the cluster") - removeBindingConstraint(name = "bc_nuclear", opts = opts) - expect_no_error(removeCluster(area = "zone1", cluster_name = "nuclear", add_prefix = TRUE, opts = opts)) + expect_warning(removeCluster(area = "zone1", cluster_name = "nuclear", add_prefix = TRUE, opts = opts), + regexp = "The following binding constraints have the cluster to remove as a coefficient :") unlink(x = opts$studyPath, recursive = TRUE) }) diff --git a/tests/testthat/test-createClusterST.R b/tests/testthat/test-createClusterST.R index 8a92f18d..04b65c34 100644 --- a/tests/testthat/test-createClusterST.R +++ b/tests/testthat/test-createClusterST.R @@ -1,5 +1,5 @@ - +# >=860 ---- test_that("Create short-term storage cluster (new feature v8.6)",{ ## basics errors cases ---- suppressWarnings( @@ -28,7 +28,8 @@ test_that("Create short-term storage cluster (new feature v8.6)",{ # cluster already exist in given area, with same name and group createClusterST(area_test_clust, - cluster_test_name, group_test_name, + cluster_test_name, + group_test_name, add_prefix = TRUE) testthat::expect_error(createClusterST(area_test_clust, @@ -55,7 +56,7 @@ test_that("Create short-term storage cluster (new feature v8.6)",{ info_clusters <- readClusterSTDesc() info_clusters <- info_clusters[cluster %in% namecluster_check, ] - # default values + # default values (only v860 properties) default_values <- storage_values_default() info_clusters <- info_clusters[, .SD, .SDcols= names(default_values)] @@ -172,218 +173,6 @@ test_that("Create short-term storage cluster (new feature v8.6)",{ }) -test_that("Test the behaviour of createClusterST() if the ST cluster already exists", { - - ant_version <- "8.6.0" - st_test <- paste0("my_study_860_", paste0(sample(letters,5),collapse = "")) - suppressWarnings(opts <- createStudy(path = pathstd, study_name = st_test, antares_version = ant_version)) - area <- "zone51" - createArea(area) - suppressWarnings(opts <- setSimulationPath(opts$studyPath, simulation = "input")) - - val <- 0.7 - val_mat <- matrix(val, 8760) - cl_name <- "test_storage" - createClusterST(area = area, - cluster_name = cl_name, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - opts = opts) - - suppressWarnings(opts <- 
setSimulationPath(opts$studyPath, simulation = "input")) - - ## createClusterST() - # With overwrite FALSE - expect_error(createClusterST(area = area, - cluster_name = cl_name, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = FALSE, - opts = opts), regexp = "Cluster already exists.") - - # With overwrite TRUE - expect_no_error(createClusterST(area = area, - cluster_name = cl_name, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = TRUE, - opts = opts)) - - # Test case insensitive - cl_name_2 <- "clUstEr_st_tEst_crEAtE2" - expect_no_error(createClusterST(area = area, - cluster_name = cl_name_2, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = FALSE, - opts = simOptions())) - - expect_error(createClusterST(area = toupper(area), - cluster_name = toupper(cl_name_2), - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = FALSE, - opts = simOptions()), regexp = "Cluster already exists.") - - ## removeClusterST() - # On a non-existing area - expect_error(removeClusterST(area = "bla", - cluster_name = cl_name, - add_prefix = TRUE, - opts = simOptions()), regexp = "is not a valid area name") - - # On a non-existing cluster - expect_error(removeClusterST(area = area, - cluster_name = "not_a_cluster", - opts = simOptions()), regexp = "Cluster can not be removed.") - - # On an existing cluster - expect_no_error(removeClusterST(area = area, - cluster_name = cl_name, - add_prefix = TRUE, - opts = simOptions())) - - # On an existing cluster - idempotence - expect_error(removeClusterST(area = area, - cluster_name = cl_name, - opts = simOptions()), regexp = "Cluster can not be removed.") - - # On an existing cluster case insensitive - expect_no_error(removeClusterST(area = area, - cluster_name = "CLuSTeR_ST_TeST_CReaTe2", - add_prefix = TRUE, - opts = simOptions())) - - unlink(x = opts$studyPath, recursive = TRUE) -}) - - -test_that("Test the behaviour of createClusterST() if the ST cluster already exists", { - - ant_version <- "8.6.0" - st_test <- paste0("my_study_860_", paste0(sample(letters,5),collapse = "")) - suppressWarnings(opts <- createStudy(path = pathstd, study_name = st_test, antares_version = ant_version)) - area <- "zone51" - createArea(area) - suppressWarnings(opts <- setSimulationPath(opts$studyPath, simulation = "input")) - - val <- 0.7 - val_mat <- matrix(val, 8760) - cl_name <- "test_storage" - createClusterST(area = area, - cluster_name = cl_name, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - opts = opts) - - suppressWarnings(opts <- setSimulationPath(opts$studyPath, simulation = "input")) - - ## createClusterST() - # With overwrite FALSE - expect_error(createClusterST(area = area, - cluster_name = cl_name, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - 
lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = FALSE, - opts = opts), regexp = "Cluster already exists.") - - # With overwrite TRUE - expect_no_error(createClusterST(area = area, - cluster_name = cl_name, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = TRUE, - opts = opts)) - - # Test case insensitive - cl_name_2 <- "clUstEr_st_tEst_crEAtE2" - expect_no_error(createClusterST(area = area, - cluster_name = cl_name_2, - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = FALSE, - opts = simOptions())) - - expect_error(createClusterST(area = toupper(area), - cluster_name = toupper(cl_name_2), - storage_parameters = storage_values_default()[1], - PMAX_injection = val_mat, - PMAX_withdrawal = val_mat, - inflows = val_mat, - lower_rule_curve = val_mat, - upper_rule_curve = val_mat, - overwrite = FALSE, - opts = simOptions()), regexp = "Cluster already exists.") - - ## removeClusterST() - # On a non-existing area - expect_error(removeClusterST(area = "bla", - cluster_name = cl_name, - add_prefix = TRUE, - opts = simOptions()), regexp = "is not a valid area name") - - # On a non-existing cluster - expect_error(removeClusterST(area = area, - cluster_name = "not_a_cluster", - opts = simOptions()), regexp = "Cluster can not be removed.") - - # On an existing cluster - expect_no_error(removeClusterST(area = area, - cluster_name = cl_name, - add_prefix = TRUE, - opts = simOptions())) - - # On an existing cluster - idempotence - expect_error(removeClusterST(area = area, - cluster_name = cl_name, - opts = simOptions()), regexp = "Cluster can not be removed.") - - # On an existing cluster case insensitive - expect_no_error(removeClusterST(area = area, - cluster_name = "CLuSTeR_ST_TeST_CReaTe2", - add_prefix = TRUE, - opts = simOptions())) - - unlink(x = opts$studyPath, recursive = TRUE) -}) - - # API ---- test_that("API Command test for createClusterST", { @@ -471,3 +260,37 @@ test_that("API Command test for createClusterST", { testthat::expect_true(all(unlist(names_file_api) %in% names_file_list)) }) + +# >=880 ---- + +test_that("Create short-term storage cluster (new feature v8.8.0)",{ + ## basics errors cases ---- + suppressWarnings( + createStudy(path = tempdir(), + study_name = "st-storage880", + antares_version = "8.8.0")) + + # default area with st cluster + area_test_clust = "al" + createArea(name = area_test_clust) + + # default + createClusterST(area = area_test_clust, + cluster_name = "default") + + read_prop <- readClusterSTDesc() + + # "enabled" must be present with TRUE values default + testthat::expect_true("enabled"%in%names(read_prop)) + testthat::expect_true(read_prop$enabled[1]%in%TRUE) + + deleteStudy() + }) + + + + + + + + diff --git a/tests/testthat/test-createLink.R b/tests/testthat/test-createLink.R index 4c844a93..68f10c42 100644 --- a/tests/testthat/test-createLink.R +++ b/tests/testthat/test-createLink.R @@ -291,12 +291,12 @@ test_that("removeLink() : link is not removed if it is referenced in a binding c suppressWarnings(opts <- setSimulationPath(path = opts$studyPath, simulation = "input")) - expect_error(removeLink(from = "zone1", to = "zone2", opts = opts), regexp = "Can not remove the link") - removeBindingConstraint(name = "bc_zone1", opts = opts) - 
expect_no_error(removeLink(from = "zone1", to = "zone2", opts = opts)) + expect_warning(removeLink(from = "zone1", to = "zone2", opts = opts), + regexp = "The following binding constraints have the link to remove as a coefficient :") # createLink() with overwrite to TRUE calls removeLink() - expect_error(createLink(from = "zone2", to = "zone3", overwrite = TRUE, opts = opts), regexp = "Can not remove the link") + expect_warning(createLink(from = "zone2", to = "zone3", overwrite = TRUE, opts = opts), + regexp = "The following binding constraints have the link to remove as a coefficient :") pathIni <- file.path(opts$inputPath, "bindingconstraints/bindingconstraints.ini") bindingConstraints <- readIniFile(pathIni, stringsAsFactors = FALSE) @@ -307,7 +307,8 @@ test_that("removeLink() : link is not removed if it is referenced in a binding c names(bindingConstraints[[bc_char]])[names(bindingConstraints[[bc_char]]) == "zone4%zone5"] <- "zone5%zone4" writeIni(listData = bindingConstraints, pathIni = pathIni, overwrite = TRUE) - expect_error(removeLink(from = "zone4", to = "zone5", opts = opts), regexp = "Can not remove the link") + expect_warning(removeLink(from = "zone4", to = "zone5", opts = opts), + regexp = "The following binding constraints have the link to remove as a coefficient :") unlink(x = opts$studyPath, recursive = TRUE) }) diff --git a/tests/testthat/test-editBindingConstraint.R b/tests/testthat/test-editBindingConstraint.R index d93cfa7a..6e6eea33 100644 --- a/tests/testthat/test-editBindingConstraint.R +++ b/tests/testthat/test-editBindingConstraint.R @@ -82,13 +82,14 @@ test_that("editBindingConstraint with 'default' group v8.7.0", { values = scenar_values_hourly, enabled = TRUE, timeStep = "hourly", - operator = "both", + operator = "greater", overwrite = TRUE, coefficients = data_terms) # PS : in this study, "default" have 1 column dimension bc <- readBindingConstraints(opts = opts_test) + ### greater to both ---- # edit properties + values (good dimension) # edit "greater" to "both" bc_names_v870 <- bc[[name_bc]]$properties$id @@ -115,11 +116,93 @@ test_that("editBindingConstraint with 'default' group v8.7.0", { testthat::expect_true(filter_year %in% "daily") testthat::expect_true(filter_synthesis %in% "daily") - # test values + # test dim values dim_col_values_input <- dim(scenar_values_daily$lt)[2] dim_col_values_edited <- dim(bc_modified[[bc_names_v870]]$values$less)[2] testthat::expect_equal(dim_col_values_input, dim_col_values_edited) + # test real values + # for both + operator_bc <- c("_lt", "_gt") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, bc_names_v870), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + # txt files (test real value) + # test just first values cause code convert 8760 to 8784 with 0 + testthat::expect_equal(head(res[[1]]), + head(data.table::as.data.table(scenar_values_daily$lt))) + testthat::expect_equal(head(res[[2]]), + head(data.table::as.data.table(scenar_values_daily$gt))) + + + + ### greater to equal ---- + # edit properties + values (good dimension) + # edit "both" to "equal" + bc_names_v870 <- bc[[name_bc]]$properties$id + editBindingConstraint(name = bc_names_v870, + values = scenar_values_daily, + timeStep = "daily", + operator = "equal", + filter_year_by_year = "daily", + filter_synthesis = "daily", + coefficients = list("fr%it"= 7.45)) + + # test real values + # for equal + operator_bc <- 
c("_eq") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, bc_names_v870), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + # txt files (test real value) + # test just first values cause code convert 8760 to 8784 with 0 + testthat::expect_equal(head(res[[1]]), + head(data.table::as.data.table(scenar_values_daily$eq))) + + ### equal to less ---- + # edit properties + values (good dimension) + # edit "equal" to "less" + bc_names_v870 <- bc[[name_bc]]$properties$id + editBindingConstraint(name = bc_names_v870, + values = scenar_values_daily, + timeStep = "daily", + operator = "less", + filter_year_by_year = "daily", + filter_synthesis = "daily", + coefficients = list("fr%it"= 7.45)) + + # test real values + # for equal + operator_bc <- c("_lt") + path_bc <- file.path(opts_test$inputPath, "bindingconstraints") + path_file_bc <- paste0(file.path(path_bc, bc_names_v870), + operator_bc, ".txt") + + # read .txt (test values) + res <- lapply(path_file_bc, + antaresRead:::fread_antares, + opts = opts_test) + + # txt files (test real value) + # test just first values cause code convert 8760 to 8784 with 0 + testthat::expect_equal(head(res[[1]]), + head(data.table::as.data.table(scenar_values_daily$lt))) + + + + # edit properties + values (bad dimension) ### error dimension ---- diff --git a/tests/testthat/test-editClusterST.R b/tests/testthat/test-editClusterST.R index 263e9ee8..6c57d5c6 100644 --- a/tests/testthat/test-editClusterST.R +++ b/tests/testthat/test-editClusterST.R @@ -1,4 +1,5 @@ +# v860 ---- test_that("edit st-storage clusters (only for study >= v8.6.0" , { # global params for structure v8.6 ---- opts_test <-createStudy(path = tempdir(), @@ -46,7 +47,7 @@ test_that("edit st-storage clusters (only for study >= v8.6.0" , { group = "Other1", add_prefix = FALSE, opts = opts_test), - regexp = "'casper' does not exist") + regexp = "'casper' doesn't exist") ## default edition cluster ---- # if all parameters are NULL => no edition of ini and data .txt @@ -236,3 +237,40 @@ test_that("API Command test for editClusterST", { names_file_list)) }) +# v880 ---- +test_that("Edit short-term storage cluster (new feature v8.8.0)",{ + ## basics errors cases ---- + suppressWarnings( + createStudy(path = tempdir(), + study_name = "st-storage880", + antares_version = "8.8.0")) + + # default area with st cluster + area_test_clust = "al" + createArea(name = area_test_clust) + + # default + createClusterST(area = area_test_clust, + cluster_name = "default") + + # edit + list_params <- storage_values_default() + list_params$efficiency <- 0.5 + list_params$reservoircapacity <- 50 + list_params$enabled <- FALSE + + editClusterST(area = area_test_clust, + cluster_name = "default", + storage_parameters = list_params) + + # read properties + st_params <- readClusterSTDesc() + + # "enabled" must be present + testthat::expect_true("enabled"%in%names(st_params)) + testthat::expect_true(st_params$enabled[1]%in%FALSE) + + deleteStudy() +}) + + diff --git a/tests/testthat/test-editLink.R b/tests/testthat/test-editLink.R index 901d9a64c..10efe0a8 100644 --- a/tests/testthat/test-editLink.R +++ b/tests/testthat/test-editLink.R @@ -3,39 +3,61 @@ test_that("Edit a link filters", { pasteVectorItemsWithComma <- function(x) paste(x,collapse=", ") - opts_test <-createStudy(path = tempdir(), - study_name = "edit-link", - antares_version = "8.6.0") + opts_test <- 
suppressWarnings(createStudy(path = tempdir(), + study_name = "edit-link", + antares_version = "8.6.0" + ) + ) - opts_test <- createArea(name="area1",opts=opts_test) - opts_test <- createArea(name="area2",opts=opts_test) - opts_test <- createLink(from="area1",to="area2",opts=opts_test) + opts_test <- createArea(name = "area1", opts = opts_test) + opts_test <- createArea(name = "area2", opts = opts_test) + opts_test <- createArea(name = "area3", opts = opts_test) + opts_test <- createLink(from = "area1", to = "area2", opts = opts_test) + opts_test <- createLink(from = "area1", to = "area3", opts = opts_test) - new_filtering_synthesis <- c("hourly","daily") - new_filtering_year_by_year <- c("hourly","daily") + new_filtering_synthesis <- c("hourly", "daily") + new_filtering_year_by_year <- c("hourly", "daily") - link_test <- getGeographicTrimming(areas="area1",opts=opts_test)$links$`area1 - area2` + link_test <- getGeographicTrimming(areas = "area1", opts = opts_test)[["links"]][["area1 - area2"]] testthat::expect_false( link_test$`filter-synthesis`==pasteVectorItemsWithComma(new_filtering_synthesis) && link_test$`filter-year-by-year`==pasteVectorItemsWithComma(new_filtering_year_by_year) ) - opts_test <- editLink( - from="area1", - to="area2", - filter_year_by_year=new_filtering_year_by_year, - filter_synthesis=new_filtering_synthesis, - opts=opts_test + from = "area1", + to = "area2", + filter_year_by_year = new_filtering_year_by_year, + filter_synthesis = new_filtering_synthesis, + opts = opts_test ) - new_link_test <- getGeographicTrimming(areas="area1",opts=opts_test)$links$`area1 - area2` + new_link_test <- getGeographicTrimming(areas = "area1", opts = opts_test)[["links"]][["area1 - area2"]] testthat::expect_true( new_link_test$`filter-synthesis`==pasteVectorItemsWithComma(new_filtering_synthesis) && new_link_test$`filter-year-by-year`==pasteVectorItemsWithComma(new_filtering_year_by_year) ) + # Default case : filter_synthesis/filter_year_by_year NULL + # The goal is to test that those two properties are not overwritten if NULL is provided. 
+ geo_before <- getGeographicTrimming(areas = "area1", opts = opts_test) + geo_before_target_link <- geo_before[["links"]][["area1 - area3"]] + + ncol <- 2 + new_tsLink <- matrix(rep(1, 8760 * ncol), ncol = ncol) + opts_test <- editLink( + from = "area1", + to = "area3", + tsLink = new_tsLink, + opts = opts_test + ) + + geo_after <- getGeographicTrimming(areas = "area1", opts = opts_test) + geo_after_target_link <- geo_after[["links"]][["area1 - area3"]] + + expect_true(geo_before_target_link[["filter-year-by-year"]] == geo_after_target_link[["filter-year-by-year"]]) + expect_true(geo_before_target_link[["filter-synthesis"]] == geo_after_target_link[["filter-synthesis"]]) }) \ No newline at end of file diff --git a/tests/testthat/test-updateGeneralSettings.R b/tests/testthat/test-updateGeneralSettings.R index f103ad6d..312d21f3 100644 --- a/tests/testthat/test-updateGeneralSettings.R +++ b/tests/testthat/test-updateGeneralSettings.R @@ -13,17 +13,58 @@ sapply(studies, function(study) { test_that("Update a general parameter", { # year-by-year - expect_true(getOption("antares")$parameters$general$`year-by-year`) - updateGeneralSettings(year.by.year = FALSE) - expect_false(getOption("antares")$parameters$general$`year-by-year`) + current_value <- getOption("antares")$parameters$general$`year-by-year` + updateGeneralSettings(year.by.year = !current_value, opts = opts) + new_value <- getOption("antares")$parameters$general$`year-by-year` + if (current_value) { + expect_false(new_value) + } else { + expect_true(new_value) + } # geographic-trimming - expect_true(getOption("antares")$parameters$general$`geographic-trimming`) - updateGeneralSettings(geographic.trimming = FALSE) - expect_false(getOption("antares")$parameters$general$`geographic-trimming`) + current_value <- getOption("antares")$parameters$general$`geographic-trimming` + updateGeneralSettings(geographic.trimming = !current_value, opts = opts) + new_value <- getOption("antares")$parameters$general$`geographic-trimming` + if (current_value) { + expect_false(new_value) + } else { + expect_true(new_value) + } + + # thematic-trimming + current_value <- getOption("antares")$parameters$general$`thematic-trimming` + updateGeneralSettings(thematic.trimming = !current_value, opts = opts) + new_value <- getOption("antares")$parameters$general$`thematic-trimming` + if (current_value) { + expect_false(new_value) + } else { + expect_true(new_value) + } + }) # remove temporary study unlink(x = file.path(pathstd, "test_case"), recursive = TRUE) }) + + +# custom-scenario ---- +test_that("updateGeneralSettings() : check appearance of property custom-scenario and check if it is written in lowercase", { + + ant_version <- "8.2.0" + st_test <- paste0("my_study_820_", paste0(sample(letters,5),collapse = "")) + suppressWarnings(opts <- createStudy(path = pathstd, study_name = st_test, antares_version = ant_version)) + + # custom-scenario (logical) + expect_false(getOption("antares")$parameters$general$`custom-scenario`) + updateGeneralSettings(custom.scenario = TRUE, opts = opts) + expect_true(getOption("antares")$parameters$general$`custom-scenario`) + # check lower case for a logical value + lines_generaldata <- readLines(file.path(opts$studyPath, "settings", "generaldata.ini")) + expect_false(paste0(dicoGeneralSettings("custom.scenario"), " = TRUE") %in% lines_generaldata) + expect_true(paste0(dicoGeneralSettings("custom.scenario"), " = true") %in% lines_generaldata) + + unlink(x = opts$studyPath, recursive = TRUE) +}) diff --git 
a/tests/testthat/test-updateOptimizationSettings.R b/tests/testthat/test-updateOptimizationSettings.R index a5020e07..9354d4e8 100644 --- a/tests/testthat/test-updateOptimizationSettings.R +++ b/tests/testthat/test-updateOptimizationSettings.R @@ -27,3 +27,22 @@ sapply(studies, function(study) { unlink(x = file.path(pathstd, "test_case"), recursive = TRUE) }) + + +test_that("solver.log parameter available only if version >= 8.8", { + + ant_version <- "8.2.0" + st_test <- paste0("my_study_820_", paste0(sample(letters,5),collapse = "")) + suppressWarnings(opts <- createStudy(path = pathstd, study_name = st_test, antares_version = ant_version)) + expect_error(updateOptimizationSettings(solver.log = "true"), + regexp = "updateOptimizationSettings: solver.log parameter is only available if using Antares >= 8.8.0" + ) + unlink(x = opts$studyPath, recursive = TRUE) + + ant_version <- "8.8.0" + st_test <- paste0("my_study_880_", paste0(sample(letters,5),collapse = "")) + suppressWarnings(opts <- createStudy(path = pathstd, study_name = st_test, antares_version = ant_version)) + updateOptimizationSettings(solver.log = "true") + expect_true(getOption("antares")$parameters$optimization$`solver-log`) + unlink(x = opts$studyPath, recursive = TRUE) +}) diff --git a/tests/testthat/test-updateOutputSettings.R b/tests/testthat/test-updateOutputSettings.R index 1f5257a8..cb13784d 100644 --- a/tests/testthat/test-updateOutputSettings.R +++ b/tests/testthat/test-updateOutputSettings.R @@ -7,14 +7,37 @@ sapply(studies, function(study) { setup_study(study, sourcedir) opts <- antaresRead::setSimulationPath(studyPath, "input") - - test_that("Update an output parameter", { - updateOutputSettings(synthesis = FALSE) + # synthesis + current_value <- getOption("antares")[["parameters"]][["output"]][["synthesis"]] + opts <- updateOutputSettings(synthesis = !current_value, opts = opts) + new_value <- getOption("antares")[["parameters"]][["output"]][["synthesis"]] + + if (current_value) { + expect_false(new_value) + } else { + expect_true(new_value) + } - expect_false(getOption("antares")$parameters$output$synthesis) + # storenewset + current_value <- getOption("antares")[["parameters"]][["output"]][["storenewset"]] + opts <- updateOutputSettings(storenewset = !current_value, opts = opts) + new_value <- getOption("antares")[["parameters"]][["output"]][["storenewset"]] + + if (current_value) { + expect_false(new_value) + } else { + expect_true(new_value) + } + + # archives + current_value <- getOption("antares")[["parameters"]][["output"]][["archives"]] + opts <- updateOutputSettings(archives = c("load", "wind"), opts = opts) + new_value <- getOption("antares")[["parameters"]][["output"]][["archives"]] + expect_true(current_value != new_value) + expect_true(new_value == .format_ini_rhs(value = c("load", "wind"))) }) # remove temporary study diff --git a/tests/testthat/test-utils.R b/tests/testthat/test-utils.R new file mode 100644 index 00000000..0f976911 --- /dev/null +++ b/tests/testthat/test-utils.R @@ -0,0 +1,48 @@ +test_that("Control the short-term storage existence",{ + + ant_version <- "8.7.0" + study_name <- paste0("my_study_870_", paste0(sample(letters,5),collapse = "")) + suppressWarnings(opts <- createStudy(path = pathstd, study_name = study_name, antares_version = ant_version)) + + nb_areas <- 5 + ids_areas <- seq(1,nb_areas) + my_areas <- paste0("zone",ids_areas) + + lapply(my_areas, FUN = function(area){createArea(name = area, opts = simOptions())}) + + st_clusters <- c("batterie", "pondage") + my_clusters 
<- expand.grid("area" = my_areas, "cluster_name" = st_clusters) + + apply(my_clusters[,c("area","cluster_name")], + MARGIN = 1, + FUN = function(row){ + createClusterST(area = as.character(row[1]), + cluster_name = as.character(row[2]), + add_prefix = FALSE, + opts = simOptions() + ) + } + ) + + createClusterST(area = "zone1", cluster_name = "vehicle", add_prefix = FALSE, opts = simOptions()) + exists_st_cluster <- check_cluster_name(area = "zone1", cluster_name = "vehicle", add_prefix = FALSE, opts = simOptions()) + expect_true(exists_st_cluster) + exists_st_cluster <- check_cluster_name(area = "zone3", cluster_name = "vehicle", add_prefix = FALSE, opts = simOptions()) + expect_false(exists_st_cluster) +}) + + +test_that("Control the basic behaviour of .format_ini_rhs()",{ + + res <- .format_ini_rhs(value = TRUE) + expect_true(res == "true") + + res <- .format_ini_rhs(value = FALSE) + expect_true(res == "false") + + res <- .format_ini_rhs(value = "fake_value") + expect_true(res == "fake_value") + + res <- .format_ini_rhs(value = letters[seq(1,5)]) + expect_true(res == paste(letters[seq(1,5)], collapse = ", ")) +}) diff --git a/tests/testthat/test-writeInputTS.R b/tests/testthat/test-writeInputTS.R index 672ec4a6..b136c1c8 100644 --- a/tests/testthat/test-writeInputTS.R +++ b/tests/testthat/test-writeInputTS.R @@ -3,44 +3,44 @@ context("Function writeInputTS") # v710 ---- sapply(studies, function(study) { - + setup_study(study, sourcedir) opts <- antaresRead::setSimulationPath(studyPath, "input") - - + + test_that("Write new input time series", { # Classic cases ---- - + area <- sample(x = getOption("antares")$areaList, size = 1) - + M <- matrix(c(rep(8, 8760), rep(5.1, 8760)), nrow = 8760) - + writeInputTS(area = area, type = "solar", data = M) - + values_file <- file.path(pathstd, "test_case", "input", "solar", "series", paste0("solar_", area, ".txt")) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = values_file), as.data.table(M)) - - + + #Wrong Area expect_error( writeInputTS(area = "fake area", type = "solar", data = M), regexp = "not a valid area" ) - + #Run a second time the function without overwrite = TRUE. expect_error( writeInputTS(area = area, type = "solar", data = M, overwrite = FALSE), regexp = "already exist" ) - + #Wrong dimension for data. expect_error( writeInputTS(area = area, type = "solar", data = matrix(1:3)), regexp = "8760\\*N matrix" ) - + #unknown type expect_error( writeInputTS(area = area, @@ -49,36 +49,36 @@ sapply(studies, function(study) { overwrite = TRUE), regexp = "'arg'" ) - - + + # hydroSTOR case ---- - + M_hydrostor <- matrix(c(rep(8, 365), rep(5.1, 365)), nrow = 365) - + writeInputTS(area = area, type = "hydroSTOR", data = M_hydrostor) - + values_file <- file.path(pathstd, "test_case", "input", "hydro", "series", area, "mod.txt") - + expect_equal(antaresRead:::fread_antares(opts = opts, file = values_file), as.data.table(M_hydrostor)) - + #Wrong area expect_error( writeInputTS(area = "fake area", type = "hydroSTOR", data = M_hydrostor), regexp = "not a valid area" ) - + #Run a second time the function without overwrite = TRUE. expect_error( writeInputTS(area = area, type = "hydroSTOR", data = M_hydrostor, overwrite = FALSE), regexp = "already exist" ) - + #Wrong dimension for data. 
expect_error( writeInputTS(area = area, type = "hydroSTOR", data = matrix(1:3)), regexp = "365\\*N matrix" ) - + #unknown type expect_error( writeInputTS(area = area, @@ -89,17 +89,17 @@ sapply(studies, function(study) { regexp = "'arg'" ) }) - + # remove temporary study unlink(x = file.path(pathstd, "test_case"), recursive = TRUE) - + }) # >= 820 ---- ## Alphabetical order links ---- test_that("Check if writeInputTS() writes time series link regardless alphabetical order", { - + ant_version <- "8.2.0" st_test <- paste0("my_study_820_", paste0(sample(letters,5),collapse = "")) suppressWarnings(opts <- createStudy(path = pathstd, study_name = st_test, antares_version = ant_version)) @@ -108,47 +108,47 @@ test_that("Check if writeInputTS() writes time series link regardless alphabetic createArea(area) createArea(area2) suppressWarnings(opts <- setSimulationPath(opts$studyPath, simulation = "input")) - + createLink(from = area, to = area2, opts = opts) suppressWarnings(opts <- setSimulationPath(opts$studyPath, simulation = "input")) - + path_direct_link_file <- file.path(opts$inputPath, "links", area, "capacities", paste0(area2,"_direct.txt")) path_indirect_link_file <- file.path(opts$inputPath, "links", area, "capacities", paste0(area2,"_indirect.txt")) - + dat_mat <- c(1,3,2,4) dat_mat_inv <- c(4,2,3,1) nb_cols <- length(dat_mat) - + # alphabetical order mat_multi_scen <- matrix(data = rep(dat_mat, each = 8760), ncol = nb_cols) writeInputTS(data = mat_multi_scen, link = paste0(area,"%",area2), type = "tsLink", opts = opts) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_direct_link_file), as.data.table(mat_multi_scen[,seq(1, nb_cols/2)])) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_indirect_link_file), as.data.table(mat_multi_scen[,seq((nb_cols/2)+1, nb_cols)])) - + # no alphabetical order mat_multi_scen_inv <- matrix(data = rep(dat_mat_inv, each = 8760), ncol = nb_cols) writeInputTS(data = mat_multi_scen_inv, link = paste0(area2,"%",area), type = "tsLink", opts = opts) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_indirect_link_file), as.data.table(mat_multi_scen_inv[,seq(1, nb_cols/2)])) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_direct_link_file), as.data.table(mat_multi_scen_inv[,seq((nb_cols/2)+1, nb_cols)])) - + }) ## Separator link type ---- test_that("Check if writeInputTS() writes links time series with argument link 'area1 - area2' or 'area1%area2'", { - + ant_version <- "8.2.0" st_test <- paste0("my_study_820_", paste0(sample(letters,5),collapse = "")) suppressWarnings(opts <- createStudy(path = pathstd, study_name = st_test, antares_version = ant_version)) @@ -157,43 +157,43 @@ test_that("Check if writeInputTS() writes links time series with argument link ' createArea(area) createArea(area2) suppressWarnings(opts <- setSimulationPath(opts$studyPath, simulation = "input")) - + createLink(from = area, to = area2, opts = opts) suppressWarnings(opts <- setSimulationPath(opts$studyPath, simulation = "input")) - + path_direct_link_file <- file.path(opts$inputPath, "links", area, "capacities", paste0(area2,"_direct.txt")) path_indirect_link_file <- file.path(opts$inputPath, "links", area, "capacities", paste0(area2,"_indirect.txt")) - + dat_mat_sep_1 <- c(1,3,2,4) nb_cols <- length(dat_mat_sep_1) mat_ts_sep_1 <- matrix(data = rep(dat_mat_sep_1, each = 8760), ncol = nb_cols) - + dat_mat_sep_2 <- c(5,7,6,8) nb_cols <- length(dat_mat_sep_2) mat_ts_sep_2 <- matrix(data = rep(dat_mat_sep_2, 
each = 8760), ncol = nb_cols) - + # link separator '%' writeInputTS(data = mat_ts_sep_1, link = paste0(area,"%",area2), type = "tsLink", opts = opts) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_direct_link_file), as.data.table(mat_ts_sep_1[,seq(1, nb_cols/2)])) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_indirect_link_file), as.data.table(mat_ts_sep_1[,seq((nb_cols/2)+1, nb_cols)])) - + # link separator ' - ' writeInputTS(data = mat_ts_sep_2, link = paste0(area," - ",area2), type = "tsLink", opts = opts) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_direct_link_file), as.data.table(mat_ts_sep_2[,seq(1, nb_cols/2)])) - + expect_equal(antaresRead:::fread_antares(opts = opts, file = path_indirect_link_file), as.data.table(mat_ts_sep_2[,seq((nb_cols/2)+1, nb_cols)])) - + }) @@ -201,7 +201,7 @@ test_that("Check if writeInputTS() writes links time series with argument link ' # >= v860 ---- ## write mingen file ---- - # write mingen file depend of dimension of mod.txt file +# write mingen file depend of dimension of mod.txt file test_that("create mingen file with one or empty column dimension of mod.txt file", { # create study to have mod empty and mod with one column @@ -218,17 +218,16 @@ test_that("create mingen file with one or empty column dimension of mod.txt file opts <- simOptions() # check dimensions of mod.txt for every areas - path_file_mod <- file.path(opts$inputPath, "hydro", "series", - getAreas(), + path_file_mod <- file.path(opts$inputPath, "hydro", "series", + getAreas(), "mod.txt") - - list_dim <- lapply(path_file_mod, function(x){ + list_dim <- lapply(path_file_mod, function(x){ # read file <- fread(file = x) dim_file <- dim(file)[2] }) - + names(list_dim) <- getAreas() ## trivial case @@ -263,12 +262,12 @@ test_that("create mingen file with one or empty column dimension of mod.txt file area_1 <- getAreas()[list_dim==1][1] # write for an area with file mod.txt NULL or nb columns == 1 - writeInputTS(area = area_1, type = "mingen", + writeInputTS(area = area_1, type = "mingen", data = M_mingen , overwrite = TRUE, opts = opts) - + # use antaresRead to test read_ts_file <- readInputTS(mingen = "all", opts = opts) - + # tests correct reading data # check col name "mingen" testthat::expect_true("mingen" %in% names(read_ts_file)) @@ -299,132 +298,132 @@ test_that("create mingen file with multi dimension mod.txt file", { #Avoid warning related to code writed outside test_that. 
#suppressWarnings(opts <- antaresRead::setSimulationPath(study_temp_path, "input")) - -## Check column dimension ---- -test_that("create mingen file data v860", { - - #Initialize mingen data - M_mingen = matrix(0,8760,5) - # write TS with 3 columns for area_3 et file mod.txt - writeInputTS(data = matrix(12,365,3), type = "hydroSTOR", area = area_3, overwrite = TRUE, opts = opts) - - # [management rules] for mingen data : - # file mod.txt (in /series) have to be same column dimension - # or column dimension of 1 or NULL (empty file) - - # check dimensions of mod.txt for every areas - path_file_mod <- file.path(opts$inputPath, "hydro", "series", - getAreas(), - "mod.txt") - - list_dim <- lapply(path_file_mod, function(x){ - # read - file <- fread(file = x) - dim_file <- dim(file)[2] - }) - - names(list_dim) <- getAreas() -# <<<<<<< HEAD -# -# # PS : study v8.7.0 have only mod files with 5 columns dimension -# -# -# ## multi columns cas for mod.txt file -# # mod.txt column dimension >= 1 -# area_mult <- getAreas()[list_dim>1][1] -# -# # rewrite with less columns -# mod_data = matrix(60,365,4) -# -# writeInputTS(area = area_mult, -# type = "hydroSTOR", -# data = mod_data, -# overwrite = TRUE) -# -# # write for an area with file mod.txt >1 columns -# # error case cause mod.txt dimension -# testthat::expect_error(writeInputTS(area = area_mult, type = "mingen", -# data = M_mingen , overwrite = TRUE, opts = opts), -# regexp = 'mingen \'data\' must be either a 8760\\*1 or 8760\\*4 matrix.') -# -# # you can write only mingen file with dimension 1 -# writeInputTS(area = area_mult, type = "mingen", -# data = as.matrix(M_mingen[,1]) , -# ======= - - ## trivial case - # mod.txt column dimension == 1 - # write for an area with file mod.txt NULL or nb columns == 1 - writeInputTS(area = area_1, type = "mingen", - data = M_mingen , overwrite = TRUE, opts = opts) - - # use antaresRead to test - read_ts_file <- readInputTS(mingen = "all", opts = opts) - - # tests correct reading data - # check col name "mingen" - testthat::expect_true("mingen" %in% names(read_ts_file)) - # check your area - testthat::expect_true(area_1 %in% unique(read_ts_file$area)) - # check dimension data for your area - testthat::expect_equal(dim(M_mingen)[2], max(read_ts_file[area %in% area_1, tsId])) - - - # mod.txt column dimension == 0 (empty file) - area_2 <- getAreas()[list_dim==0][1] - - # write for an area with file mod.txt empty columns == 0 - writeInputTS(area = area_2, type = "mingen", - data = M_mingen , overwrite = TRUE, opts = opts) - - # use antaresRead to test - read_ts_file <- readInputTS(mingen = "all", opts = opts) - - # check your area - testthat::expect_true(area_2 %in% unique(read_ts_file$area)) - - - ## multi columns cas for mod.txt file - # mod.txt column dimension >= 1 - - - # write for an area with file mod.txt >1 columns - # error case cause mod.txt dimension - - area_mult <- getAreas()[list_dim>1][1] - testthat::expect_error(writeInputTS(area = area_mult, type = "mingen", - data = matrix(0,8760,5) , overwrite = TRUE, opts = opts), - regexp = 'mingen \'data\' must be either a 8760\\*1 or 8760\\*3 matrix.') - - # you can write only mingen file with dimension 1 - writeInputTS(area = area_2, type = "mingen", - data = as.matrix(M_mingen[,1]) , - overwrite = TRUE, opts = opts) - - # use antaresRead to test - read_ts_file <- readInputTS(mingen = "all", opts = opts) - - # check your area - testthat::expect_true(area_2 %in% unique(read_ts_file$area)) - # check dimension data for your area - testthat::expect_equal(1, 
max(read_ts_file[area %in% area_2, tsId])) - - - - - - ## display warning message with type= "hydroSTOR" (minor update function v860) - - # Wrong format of data, here it must be either 1 or 5 columns. - M_hydrostor <- matrix(c(rep(8, 365), rep(5.1, 365)), nrow = 365) - - # warning about the file format - expect_warning(writeInputTS(area = getAreas()[2], - type = "hydroSTOR", - data = M_hydrostor, - opts = opts), - regexp = "mod 'data' must be") + ## Check column dimension ---- + test_that("create mingen file data v860", { + + #Initialize mingen data + M_mingen = matrix(0,8760,5) + # write TS with 3 columns for area_3 et file mod.txt + writeInputTS(data = matrix(12,365,3), type = "hydroSTOR", area = area_3, overwrite = TRUE, opts = opts) + + # [management rules] for mingen data : + # file mod.txt (in /series) have to be same column dimension + # or column dimension of 1 or NULL (empty file) + + # check dimensions of mod.txt for every areas + path_file_mod <- file.path(opts$inputPath, "hydro", "series", + getAreas(), + "mod.txt") + + + list_dim <- lapply(path_file_mod, function(x){ + # read + file <- fread(file = x) + dim_file <- dim(file)[2] + }) + + names(list_dim) <- getAreas() + # <<<<<<< HEAD + # + # # PS : study v8.7.0 have only mod files with 5 columns dimension + # + # + # ## multi columns cas for mod.txt file + # # mod.txt column dimension >= 1 + # area_mult <- getAreas()[list_dim>1][1] + # + # # rewrite with less columns + # mod_data = matrix(60,365,4) + # + # writeInputTS(area = area_mult, + # type = "hydroSTOR", + # data = mod_data, + # overwrite = TRUE) + # + # # write for an area with file mod.txt >1 columns + # # error case cause mod.txt dimension + # testthat::expect_error(writeInputTS(area = area_mult, type = "mingen", + # data = M_mingen , overwrite = TRUE, opts = opts), + # regexp = 'mingen \'data\' must be either a 8760\\*1 or 8760\\*4 matrix.') + # + # # you can write only mingen file with dimension 1 + # writeInputTS(area = area_mult, type = "mingen", + # data = as.matrix(M_mingen[,1]) , + # ======= + + ## trivial case + # mod.txt column dimension == 1 + # write for an area with file mod.txt NULL or nb columns == 1 + writeInputTS(area = area_1, type = "mingen", + data = M_mingen , overwrite = TRUE, opts = opts) + + # use antaresRead to test + read_ts_file <- readInputTS(mingen = "all", opts = opts) + + # tests correct reading data + # check col name "mingen" + testthat::expect_true("mingen" %in% names(read_ts_file)) + # check your area + testthat::expect_true(area_1 %in% unique(read_ts_file$area)) + # check dimension data for your area + testthat::expect_equal(dim(M_mingen)[2], max(read_ts_file[area %in% area_1, tsId])) + + + # mod.txt column dimension == 0 (empty file) + area_2 <- getAreas()[list_dim==0][1] + + # write for an area with file mod.txt empty columns == 0 + writeInputTS(area = area_2, type = "mingen", + data = M_mingen , overwrite = TRUE, opts = opts) + + # use antaresRead to test + read_ts_file <- readInputTS(mingen = "all", opts = opts) + + # check your area + testthat::expect_true(area_2 %in% unique(read_ts_file$area)) + + + ## multi columns cas for mod.txt file + # mod.txt column dimension >= 1 + + + # write for an area with file mod.txt >1 columns + # error case cause mod.txt dimension + + area_mult <- getAreas()[list_dim>1][1] + testthat::expect_error(writeInputTS(area = area_mult, type = "mingen", + data = matrix(0,8760,5) , overwrite = TRUE, opts = opts), + regexp = 'mingen \'data\' must be either a 8760\\*1 or 8760\\*3 matrix.') + + # you can write 
only mingen file with dimension 1 + writeInputTS(area = area_2, type = "mingen", + data = as.matrix(M_mingen[,1]) , + overwrite = TRUE, opts = opts) + + # use antaresRead to test + read_ts_file <- readInputTS(mingen = "all", opts = opts) + + # check your area + testthat::expect_true(area_2 %in% unique(read_ts_file$area)) + # check dimension data for your area + testthat::expect_equal(1, max(read_ts_file[area %in% area_2, tsId])) + + + + + + ## display warning message with type= "hydroSTOR" (minor update function v860) + + # Wrong format of data, here it must be either 1 or 5 columns. + M_hydrostor <- matrix(c(rep(8, 365), rep(5.1, 365)), nrow = 365) + + # warning about the file format + expect_warning(writeInputTS(area = getAreas()[2], + type = "hydroSTOR", + data = M_hydrostor, + opts = opts), + regexp = "mod 'data' must be") }) unlink(x = opts$studyPath, recursive = TRUE) testthat::expect_true(TRUE) @@ -433,14 +432,14 @@ test_that("create mingen file data v860", { # Rollback to empty file ---- test_that("writeInputTS() in 8.6.0 : rollback to an empty file", { - + ant_version <- "8.6.0" st_test <- paste0("my_study_860_", paste0(sample(letters,5),collapse = "")) suppressWarnings(opts <- createStudy(path = tempdir(), study_name = st_test, antares_version = ant_version)) area <- "zone51" opts <- createArea(area) opts <- setSimulationPath(opts$studyPath, simulation = "input") - + path_mingen <- file.path(opts$inputPath, "hydro", "series", area, "mingen.txt") mat_mingen <- matrix(6,8760,5) expect_error(writeInputTS(area = area, @@ -451,43 +450,43 @@ test_that("writeInputTS() in 8.6.0 : rollback to an empty file", { ,regexp = "can not be updated" ) expect_true(file.size(path_mingen) == 0) - + unlink(x = opts$studyPath, recursive = TRUE) }) # Error mingen.txt vs mod.txt ---- test_that("writeInputTS() in 8.6.0 : check if there is an error when control is enabled and data is inconsistent between mingen.txt and mod.txt", { - + ant_version <- "8.6.0" st_test <- paste0("my_study_860_", paste0(sample(letters,5),collapse = "")) suppressWarnings(opts <- createStudy(path = tempdir(), study_name = st_test, antares_version = ant_version)) area <- "zone51" opts <- createArea(area) opts <- setSimulationPath(opts$studyPath, simulation = "input") - + lst_yearly <- list("use heuristic" = TRUE, "follow load" = TRUE, "reservoir" = TRUE) lst_monthly <- list("use heuristic" = TRUE, "follow load" = TRUE, "reservoir" = FALSE) lst_weekly <- list("use heuristic" = TRUE, "follow load" = FALSE) - + nb_hours_per_day <- 24 nb_days_per_year <- 365 nb_hours_per_year <- nb_hours_per_day * nb_days_per_year # Put more than 1 ts nb_ts <- 5 - + mat_maxpower_init <- matrix(data = rep(c(10000, 24, 0, 24), each = 365), ncol = 4) - + mat_mingen_false <- matrix(1,nb_hours_per_year,nb_ts) mat_mingen_true <- matrix(-1,nb_hours_per_year,nb_ts) mat_mingen_init <- matrix(0,nb_hours_per_year,nb_ts) - + mat_mod_false <- matrix(-1,nb_days_per_year,nb_ts) mat_mod_true <- matrix(1,nb_days_per_year,nb_ts) mat_mod_init <- matrix(0,nb_days_per_year,nb_ts) - + writeHydroValues(area= area, type = "maxpower", data = mat_maxpower_init, opts = opts) - + # YEARLY writeIniHydro(area, params = lst_yearly, mode = "other", opts = opts) # ref mod @@ -508,7 +507,7 @@ test_that("writeInputTS() in 8.6.0 : check if there is an error when control is ) ,regexp = "can not be updated" ) - + # MONTHLY writeIniHydro(area, params = lst_monthly, mode = "other", opts = opts) # ref mod @@ -529,7 +528,7 @@ test_that("writeInputTS() in 8.6.0 : check if there is an error 
when control is ) ,regexp = "can not be updated" ) - + # WEEKLY writeIniHydro(area, params = lst_weekly, mode = "other", opts = opts) # ref mod @@ -550,47 +549,47 @@ test_that("writeInputTS() in 8.6.0 : check if there is an error when control is ) ,regexp = "can not be updated" ) - + unlink(x = opts$studyPath, recursive = TRUE) - + }) # Success mingen.txt vs mod.txt ---- test_that("writeInputTS() in 8.6.0 : check if new data is written when control is enabled and data is consistent between mingen.txt and mod.txt", { - + ant_version <- "8.6.0" st_test <- paste0("my_study_860_", paste0(sample(letters,5),collapse = "")) suppressWarnings(opts <- createStudy(path = tempdir(), study_name = st_test, antares_version = ant_version)) area <- "zone51" opts <- createArea(area) opts <- setSimulationPath(opts$studyPath, simulation = "input") - + path_mod_file <- file.path(opts$inputPath, "hydro", "series", area, "mod.txt") path_mingen_file <- file.path(opts$inputPath, "hydro", "series", area, "mingen.txt") - + lst_yearly <- list("use heuristic" = TRUE, "follow load" = TRUE, "reservoir" = TRUE) lst_monthly <- list("use heuristic" = TRUE, "follow load" = TRUE, "reservoir" = FALSE) lst_weekly <- list("use heuristic" = TRUE, "follow load" = FALSE) - + nb_hours_per_day <- 24 nb_days_per_year <- 365 nb_hours_per_year <- nb_hours_per_day * nb_days_per_year # Put more than 1 ts nb_ts <- 5 - + mat_maxpower_init <- matrix(data = rep(c(10000, 24, 0, 24), each = 365), ncol = 4) - + mat_mingen_false <- matrix(1,nb_hours_per_year,nb_ts) mat_mingen_true <- matrix(-1,nb_hours_per_year,nb_ts) mat_mingen_init <- matrix(0,nb_hours_per_year,nb_ts) - + mat_mod_false <- matrix(-1,nb_days_per_year,nb_ts) mat_mod_true <- matrix(1,nb_days_per_year,nb_ts) mat_mod_init <- matrix(0,nb_days_per_year,nb_ts) - + writeHydroValues(area= area, type = "maxpower", data = mat_maxpower_init, opts = opts) - + # YEARLY writeIniHydro(area, params = lst_yearly, mode = "other", opts = opts) # ref mod @@ -605,7 +604,7 @@ test_that("writeInputTS() in 8.6.0 : check if new data is written when control i expect_equal(antaresRead:::fread_antares(opts = opts, file = path_mod_file), as.data.table(mat_mod_true)) - + # MONTHLY writeIniHydro(area, params = lst_monthly, mode = "other", opts = opts) # ref mod @@ -620,7 +619,7 @@ test_that("writeInputTS() in 8.6.0 : check if new data is written when control i expect_equal(antaresRead:::fread_antares(opts = opts, file = path_mod_file), as.data.table(mat_mod_true)) - + # WEEKLY writeIniHydro(area, params = lst_weekly, mode = "other", opts = opts) # ref mod @@ -635,46 +634,46 @@ test_that("writeInputTS() in 8.6.0 : check if new data is written when control i expect_equal(antaresRead:::fread_antares(opts = opts, file = path_mod_file), as.data.table(mat_mod_true)) - + unlink(x = opts$studyPath, recursive = TRUE) - + }) # Success when disabled control ---- test_that("writeInputTS() in 8.6.0 : check if new data is written when control is disabled", { - + ant_version <- "8.6.0" st_test <- paste0("my_study_860_", paste0(sample(letters,5),collapse = "")) suppressWarnings(opts <- createStudy(path = tempdir(), study_name = st_test, antares_version = ant_version)) area <- "zone51" opts <- createArea(area) opts <- setSimulationPath(opts$studyPath, simulation = "input") - + path_mod_file <- file.path(opts$inputPath, "hydro", "series", area, "mod.txt") path_mingen_file <- file.path(opts$inputPath, "hydro", "series", area, "mingen.txt") - + lst_wo_control <- list("use heuristic" = FALSE) - + nb_hours_per_day <- 24 
nb_days_per_year <- 365 nb_hours_per_year <- nb_hours_per_day * nb_days_per_year # Put more than 1 ts nb_ts <- 5 - + mat_maxpower_init <- matrix(data = rep(c(10000, 24, 0, 24), each = 365), ncol = 4) - + mat_mingen_false <- matrix(1,nb_hours_per_year,nb_ts) mat_mingen_true <- matrix(-1,nb_hours_per_year,nb_ts) mat_mingen_init <- matrix(0,nb_hours_per_year,nb_ts) - + mat_mod_false <- matrix(-1,nb_days_per_year,nb_ts) mat_mod_true <- matrix(1,nb_days_per_year,nb_ts) mat_mod_init <- matrix(0,nb_days_per_year,nb_ts) - + writeIniHydro(area, params = lst_wo_control, mode = "other", opts = opts) writeHydroValues(area= area, type = "maxpower", data = mat_maxpower_init, opts = opts) - + # ref mod writeInputTS(area = area, data = mat_mod_init, type = "hydroSTOR", opts = opts) writeInputTS(area = area, data = mat_mingen_true, type = "mingen", opts = opts) @@ -703,8 +702,6 @@ test_that("writeInputTS() in 8.6.0 : check if new data is written when control i expect_equal(antaresRead:::fread_antares(opts = opts, file = path_mod_file), as.data.table(mat_mod_init)) - + unlink(x = opts$studyPath, recursive = TRUE) -}) - - +}) \ No newline at end of file diff --git a/vignettes/api-variant-management.Rmd b/vignettes/api-variant-management.Rmd index 98f82e91..6d9ffaa9 100644 --- a/vignettes/api-variant-management.Rmd +++ b/vignettes/api-variant-management.Rmd @@ -107,7 +107,7 @@ writeVariantCommands("path/to/commands.json") Below are listed all functions from {antaresEditObject} that can be used with the API. These functions will include the following badge in their documentation: ```{r, echo=FALSE} -knitr::include_graphics("figures/badge_api_ok.svg") +knitr::include_graphics("../man/figures/badge_api_ok.svg") ```
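
For reference, the behaviour change pinned down by the updated removeLink()/removeCluster() tests can be reproduced with a minimal sketch such as the one below. The study name, area names and constraint name are illustrative placeholders, and the warning message shown is the regexp asserted in the tests above.

```r
library(antaresEditObject)

# throwaway study; all names here are placeholders
opts <- createStudy(path = tempdir(), study_name = "demo-remove-link",
                    antares_version = "8.6.0")
createArea("zone1")
createArea("zone2")
createLink(from = "zone1", to = "zone2")

# binding constraint that references the link as a coefficient
createBindingConstraint(
  name = "bc_zone1",
  operator = "less",
  timeStep = "hourly",
  coefficients = c("zone1%zone2" = 1)
)

# Previously this stopped with "Can not remove the link"; with this patch it
# only warns, listing the binding constraints that still reference the link,
# and leaves the link in place.
removeLink(from = "zone1", to = "zone2")
```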