From a22d136391b7cfc1d827126c7f10f641c15c874b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Rochette?= Date: Mon, 19 Sep 2022 19:38:45 +0200 Subject: [PATCH] Upload CRAN scripts for Linux Why ? - Received link to CRAN check scripts - Need an exploration and tests on different flavours What? - Scripts and env. variables for package incoming checks by Kurt - Scripts and env. variables for CRAN regular checks of all packages issue issue #17 --- .../CRAN_incoming/CRAN_incoming.Rproj | 16 + .../CRAN_scripts/CRAN_incoming/check.Renviron | 95 +++ .../CRAN_incoming/check_CRAN_incoming.R | 275 +++++++ .../CRAN_scripts/CRAN_regular/check-R-ng | 585 ++++++++++++++ .../CRAN_scripts/CRAN_regular/check.Renviron | 95 +++ .../CRAN_regular/check_CRAN_regular.R | 743 ++++++++++++++++++ .../CRAN_scripts/exploration_of_scripts.md | 125 +++ 7 files changed, 1934 insertions(+) create mode 100644 Documents/CRAN_scripts/CRAN_incoming/CRAN_incoming.Rproj create mode 100644 Documents/CRAN_scripts/CRAN_incoming/check.Renviron create mode 100644 Documents/CRAN_scripts/CRAN_incoming/check_CRAN_incoming.R create mode 100644 Documents/CRAN_scripts/CRAN_regular/check-R-ng create mode 100644 Documents/CRAN_scripts/CRAN_regular/check.Renviron create mode 100644 Documents/CRAN_scripts/CRAN_regular/check_CRAN_regular.R create mode 100644 Documents/CRAN_scripts/exploration_of_scripts.md diff --git a/Documents/CRAN_scripts/CRAN_incoming/CRAN_incoming.Rproj b/Documents/CRAN_scripts/CRAN_incoming/CRAN_incoming.Rproj new file mode 100644 index 0000000..e83436a --- /dev/null +++ b/Documents/CRAN_scripts/CRAN_incoming/CRAN_incoming.Rproj @@ -0,0 +1,16 @@ +Version: 1.0 + +RestoreWorkspace: Default +SaveWorkspace: Default +AlwaysSaveHistory: Default + +EnableCodeIndexing: Yes +UseSpacesForTab: Yes +NumSpacesForTab: 2 +Encoding: UTF-8 + +RnwWeave: Sweave +LaTeX: pdfLaTeX + +AutoAppendNewline: Yes +StripTrailingWhitespace: Yes diff --git a/Documents/CRAN_scripts/CRAN_incoming/check.Renviron b/Documents/CRAN_scripts/CRAN_incoming/check.Renviron new file mode 100644 index 0000000..021a88c --- /dev/null +++ b/Documents/CRAN_scripts/CRAN_incoming/check.Renviron @@ -0,0 +1,95 @@ +## KH's personal ~/.R/check.Renviron. +## Last updated on 2022-08-10 + +### Defaults for '--as-cran": commented out where not appropriate for +### all KH checks. 
+_R_CHECK_AUTOCONF_=true +## _R_CHECK_BASHISMS_=true +_R_CHECK_BOGUS_RETURN_=true +_R_CHECK_CODE_USAGE_VIA_NAMESPACES_=true +_R_CHECK_CODE_USAGE_WITH_ONLY_BASE_ATTACHED_=true +## _R_CHECK_CODOC_VARIABLES_IN_USAGES_=true +_R_CHECK_COMPILATION_FLAGS_=true +## _R_CHECK_CONNECTIONS_LEFT_OPEN_=true +_R_CHECK_DEPENDS_ONLY_DATA_=true +## _R_CHECK_DONTTEST_EXAMPLES_=true +_R_CHECK_DOT_FIRSTLIB_=true +_R_CHECK_FF_AS_CRAN_=true +_R_CHECK_FUTURE_FILE_TIMESTAMPS_=true +_R_CHECK_INSTALL_DEPENDS_=true +_R_CHECK_LIMIT_CORES_=true +_R_CHECK_MATRIX_DATA_=true +## _R_CHECK_NATIVE_ROUTINE_REGISTRATION_=true +_R_CHECK_NO_RECOMMENDED_=true +_R_CHECK_NO_STOP_ON_TEST_ERROR_=true +## _R_CHECK_ORPHANED_=true +_R_CHECK_OVERWRITE_REGISTERED_S3_METHODS_=true +_R_CHECK_PACKAGE_DATASETS_SUPPRESS_NOTES_=true +## _R_CHECK_PACKAGES_USED_CRAN_INCOMING_NOTES_=true +_R_CHECK_PACKAGES_USED_IGNORE_UNUSED_IMPORTS_=true +_R_CHECK_PACKAGES_USED_IN_TESTS_USE_SUBDIRS_=true +_R_CHECK_PRAGMAS_=true +## _R_CHECK_R_DEPENDS_=warn +_R_CHECK_R_ON_PATH_=true +_R_CHECK_RD_VALIDATE_RD2HTML_=${_R_CHECK_RD_VALIDATE_RD2HTML_-true} +## _R_CHECK_RD_CONTENTS_KEYWORDS_=true +_R_CHECK_SCREEN_DEVICE_=stop +_R_CHECK_S3_METHODS_NOT_REGISTERED_=true +_R_CHECK_SHLIB_OPENMP_FLAGS_=true +_R_CHECK_TIMINGS_=10 +_R_CHECK_THINGS_IN_CHECK_DIR_=true +## _R_CHECK_THINGS_IN_TEMP_DIR_=true +_R_CHECK_VIGNETTE_TITLES_=true +## _R_CHECK_XREFS_PKGS_ARE_DECLARED_=true +## _R_CHECK_XREFS_MIND_SUSPECT_ANCHORS_=true +_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_=true + +_R_OPTIONS_STRINGS_AS_FACTORS_=${_R_OPTIONS_STRINGS_AS_FACTORS_-false} + +### Additional settings used for all KH checks. +_R_CHECK_ALWAYS_LOG_VIGNETTE_OUTPUT_=true +_R_CHECK_CODE_ASSIGN_TO_GLOBALENV_=true +_R_CHECK_CODE_ATTACH_=true +_R_CHECK_CODE_DATA_INTO_GLOBALENV_=true +_R_CHECK_CODETOOLS_PROFILE_="suppressPartialMatchArgs=false" +_R_CHECK_CRAN_INCOMING_CHECK_URLS_IN_PARALLEL_=true +_R_CHECK_DEPRECATED_DEFUNCT_=true +_R_CHECK_DOC_SIZES2_=true +_R_CHECK_DOTCODE_RETVAL_=true +_R_CHECK_EXECUTABLES_EXCLUSIONS_=false +## _R_CHECK_FF_CALLS_=registration +_R_CHECK_FUTURE_FILE_TIMESTAMPS_LEEWAY_=6h +_R_GC_FAIL_ON_ERROR_=true +_R_CHECK_LENGTH_1_CONDITION_="package:_R_CHECK_PACKAGE_NAME_,verbose" +## _R_CHECK_LENGTH_1_CONDITION_=${_R_CHECK_LENGTH_1_CONDITION_-warn} +## _R_CHECK_LENGTH_1_LOGIC2_="package:_R_CHECK_PACKAGE_NAME_,verbose" +_R_CHECK_OVERWRITE_REGISTERED_S3_METHODS_=true +_R_CHECK_PACKAGE_DATASETS_SUPPRESS_NOTES_=true +_R_CHECK_PKG_SIZES_=false +_R_CHECK_R_DEPENDS_=true +_R_CHECK_RD_LINE_WIDTHS_=true +_R_CHECK_RD_MATH_RENDERING_=true +_R_CHECK_REPLACING_IMPORTS_=true +_R_CHECK_SERIALIZATION_=true +_R_CHECK_SRC_MINUS_W_IMPLICIT_=true +_R_CHECK_SUGGESTS_ONLY_=${_R_CHECK_SUGGESTS_ONLY_-true} +_R_CHECK_SYSTEM_CLOCK_=false +_R_CHECK_THINGS_IN_TEMP_DIR_EXCLUDE_="^(ompi|pulse|runtime-)" +_R_CHECK_TOPLEVEL_FILES_=true +_R_CHECK_VC_DIRS_=true +_R_CHECK_VIGNETTES_SKIP_RUN_MAYBE_=true +_R_CHECK_XREFS_USE_ALIASES_FROM_CRAN_=true +## Outputs +_R_CHECK_TESTS_NLINES_=0 +_R_CHECK_VIGNETTES_NLINES_=10000 +## Timings +_R_CHECK_TIMINGS_=0 +_R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_=2.5 +_R_CHECK_TEST_TIMING_=yes +_R_CHECK_TEST_TIMING_CPU_TO_ELAPSED_THRESHOLD_=2.5 +_R_CHECK_VIGNETTE_TIMING_=yes +_R_CHECK_VIGNETTE_TIMING_CPU_TO_ELAPSED_THRESHOLD_=2.5 + +## FIXME: remove eventually ... 
+## _R_CLASS_MATRIX_ARRAY_=${_R_CLASS_MATRIX_ARRAY_-true} + diff --git a/Documents/CRAN_scripts/CRAN_incoming/check_CRAN_incoming.R b/Documents/CRAN_scripts/CRAN_incoming/check_CRAN_incoming.R new file mode 100644 index 0000000..a51dfe0 --- /dev/null +++ b/Documents/CRAN_scripts/CRAN_incoming/check_CRAN_incoming.R @@ -0,0 +1,275 @@ +check_dir <- file.path(normalizePath("~"), "tmp", "CRAN") + +user <- Sys.info()["user"] +if(user == "unknown") user <- Sys.getenv("LOGNAME") +Sys.setenv("R_USER_DATA_DIR" = + sprintf("/tmp/check-CRAN-incoming-%s/data", user), + "R_USER_CACHE_DIR" = + sprintf("/tmp/check-CRAN-incoming-%s/cache", user), + "R_USER_CONFIG_DIR" = + sprintf("/tmp/check-CRAN-incoming-%s/config", user)) + +Sys.setenv("_R_CHECK_INSTALL_DEPENDS_" = "true") + +Sys.setenv("R_GC_MEM_GROW" = "2", + "R_C_BOUNDS_CHECK" = "yes") + +## +## Need OMP thread limit as 3 instead of 4 when using OpenBLAS. +Sys.setenv("OMP_NUM_THREADS" = 3, # 4? + "OMP_THREAD_LIMIT" = 3, # 4? + "RCPP_PARALLEL_NUM_THREADS" = 4, + "POCL_KERNEL_CACHE" = 0, + "OMPI_MCA_btl_base_warn_component_unused" = 0 + ) +## Or maybe instead just +Sys.setenv("OPENBLAS_NUM_THREADS" = 1) +## ??? +## + +Sys.setenv("_R_CHECK_FORCE_SUGGESTS_" = "false", + "_R_CHECK_PACKAGE_DEPENDS_IGNORE_MISSING_ENHANCES_" = "true") + +if(dir.exists(path <- file.path(normalizePath("~"), "tmp", "scratch"))) + Sys.setenv("TMPDIR" = path) + +check_args <- character() # No longer "--as-cran" ... +update_check_dir <- TRUE +use_check_stoplists <- FALSE +Ncpus <- 6 + +hostname <- system2("hostname", "-f", stdout = TRUE) +if(hostname == "xmanduin.wu.ac.at") { + Sys.setenv("_R_CHECK_EXAMPLE_TIMING_THRESHOLD_" = "10") + Ncpus <- 10 +} +if(hostname %in% c("anduin2.wu.ac.at", "anduin3.wu.ac.at")) { + Ncpus <- 28 +} + +Sys.setenv("_R_S3_METHOD_LOOKUP_BASEENV_AFTER_GLOBALENV_" = + Sys.getenv("_R_S3_METHOD_LOOKUP_BASEENV_AFTER_GLOBALENV_", + "true")) + +## +## Change eventually ... +Sys.setenv("_R_CHECK_NATIVE_ROUTINE_REGISTRATION_" = + Sys.getenv("_R_CHECK_NATIVE_ROUTINE_REGISTRATION_", + "false")) +## + +## +## Remove eventually ... +Sys.setenv("_R_S3_METHOD_LOOKUP_USE_TOPENV_AS_DEFENV_" = + Sys.getenv("_R_S3_METHOD_LOOKUP_USE_TOPENV_AS_DEFENV_", + "true")) +## + +## +## Remove eventually .. +Sys.setenv("_R_STOP_ON_XTFRM_DATA_FRAME_" = + Sys.getenv("_R_STOP_ON_XTFRM_DATA_FRAME_", + "true")) +## + +reverse <- NULL + +## +## Perhaps add a -p argument to be passed to getIncoming? +## Currently, -p KH/*.tar.gz is hard-wired. +## + +usage <- function() { + cat("Usage: check_CRAN_incoming [options]", + "", + "Run KH CRAN incoming checks.", + "", + "Options:", + " -h, --help print short help message and exit", + " -n do not update check dir", + " -s use stop lists", + " -c run CRAN incoming feasibility checks", + " -r also check strong reverse depends", + " -r=WHICH also check WHICH reverse depends", + " -N=N use N CPUs", + " -f=FLAVOR use flavor FLAVOR ('g' or 'c' for the GCC or Clang", + " defaults, 'g/v' or 'c/v' for the version 'v' ones)", + " -d=DIR use DIR as check dir (default: ~/tmp/CRAN)", + " -l run local incoming checks only", + " -a=ARGS pass ARGS to R CMD check", + "", + "The CRAN incoming feasibility checks are always used for CRAN", + "incoming checks (i.e., unless '-n' is given), and never when", + "checking the reverse dependencies.", + sep = "\n") +} + +check_args_db_from_stoplist_sh <- +function() +{ + x <- system(". 
~/lib/bash/check_R_stoplists.sh; set", intern = TRUE) + x <- grep("^check_args_db_", x, value = TRUE) + db <- sub(".*='(.*)'$", "\\1", x) + names(db) <- + chartr("_", ".", sub("^check_args_db_([^=]*)=.*", "\\1", x)) + db +} + +args <- commandArgs(trailingOnly = TRUE) +if(any(ind <- (args %in% c("-h", "--help")))) { + usage() + q("no", runLast = FALSE) +} +if(any(ind <- (args == "-n"))) { + update_check_dir <- FALSE + args <- args[!ind] +} +if(any(ind <- (args == "-s"))) { + use_check_stoplists <- TRUE + args <- args[!ind] +} +run_CRAN_incoming_feasibility_checks <- update_check_dir +if(any(ind <- (args == "-c"))) { + run_CRAN_incoming_feasibility_checks <- TRUE + args <- args[!ind] +} +if(any(ind <- (args == "-r"))) { + reverse <- list() + args <- args[!ind] +} +if(any(ind <- (args == "-l"))) { + Sys.setenv("_R_CHECK_CRAN_INCOMING_SKIP_URL_CHECKS_IF_REMOTE_" = "true", + "_R_CHECK_CRAN_INCOMING_SKIP_DOI_CHECKS_" = "true") + args <- args[!ind] +} +if(any(ind <- startsWith(args, "-r="))) { + which <- substring(args[ind][1L], 4L) + reverse <- if(which == "most") { + list(which = list(c("Depends", "Imports", "LinkingTo"), + "Suggests"), + reverse = c(TRUE, FALSE)) + } else { + list(which = which) + } + args <- args[!ind] +} +if(any(ind <- startsWith(args, "-N="))) { + Ncpus <- list(which = substring(args[ind][1L], 4L)) + args <- args[!ind] +} +if(any(ind <- startsWith(args, "-d="))) { + check_dir <- substring(args[ind][1L], 4L) + args <- args[!ind] +} +if(any(ind <- startsWith(args, "-a="))) { + check_args <- substring(args[ind][1L], 4L) + args <- args[!ind] +} +if(length(args)) { + stop(paste("unknown option(s):", + paste(sQuote(args), collapse = ", "))) +} + +# if(update_check_dir) { +# unlink(check_dir, recursive = TRUE) +# if(system2("getIncoming", +# c("-p KH/*.tar.gz", "-d", check_dir), +# stderr = FALSE)) { +# message("no packages to check") +# q("no", status = 1, runLast = FALSE) +# } +# message("") +# } + + + +check_args_db <- if(use_check_stoplists) { + check_args_db_from_stoplist_sh() +} else { + list() +} +check_env_common <- + c(paste0("LANG=", Sys.getenv("_R_CHECK_LANG_", "en_US.UTF-8")), + ## (allow checking with LANG different from en_US.UTF-8) + "LC_COLLATE=C", + "LANGUAGE=en@quot", + sprintf("_R_CHECK_CRAN_STATUS_SUMMARY_=%s", + Sys.getenv("_R_CHECK_CRAN_STATUS_SUMMARY_", "true")), + "_R_TOOLS_C_P_I_D_ADD_RECOMMENDED_MAYBE_=true", + ## These could be conditionalized according to hostname. + ## "R_SESSION_TIME_LIMIT_CPU=900", + ## "R_SESSION_TIME_LIMIT_ELAPSED=1800", + ## + ## Currently, tools::check_packages_in_dir() only uses + ## _R_INSTALL_PACKAGES_ELAPSED_TIMEOUT_ when installing + ## dependencies. 
+ "_R_INSTALL_PACKAGES_ELAPSED_TIMEOUT_=90m", + ## + "_R_CHECK_ELAPSED_TIMEOUT_=30m", + "_R_CHECK_INSTALL_ELAPSED_TIMEOUT_=90m", + character() + ) +check_env <- + list(c(check_env_common, + "_R_CHECK_WARN_BAD_USAGE_LINES_=TRUE", + sprintf("_R_CHECK_CRAN_INCOMING_SKIP_VERSIONS_=%s", + !run_CRAN_incoming_feasibility_checks), + sprintf("_R_CHECK_CRAN_INCOMING_SKIP_DATES_=%s", + !run_CRAN_incoming_feasibility_checks), + "_R_CHECK_CODOC_VARIABLES_IN_USAGES_=true", + "_R_CHECK_CONNECTIONS_LEFT_OPEN_=true", + "_R_CHECK_CRAN_INCOMING_=true", + "_R_CHECK_CRAN_INCOMING_NOTE_GNU_MAKE_=true", + "_R_CHECK_CRAN_INCOMING_REMOTE_=true", + "_R_CHECK_CRAN_INCOMING_USE_ASPELL_=true", + "_R_CHECK_CRAN_INCOMING_CHECK_FILE_URIS_=true", + "_R_CHECK_DATALIST_=true", + if(run_CRAN_incoming_feasibility_checks) + c("_R_CHECK_LENGTH_1_CONDITION_=package:_R_CHECK_PACKAGE_NAME_,abort,verbose", + ## "_R_CHECK_LENGTH_1_LOGIC2_=package:_R_CHECK_PACKAGE_NAME_,abort,verbose", + character()), + ## "_R_CHECK_ORPHANED_=true", + "_R_CHECK_PACKAGE_DEPENDS_IGNORE_MISSING_ENHANCES_=true", + "_R_CHECK_PACKAGES_USED_CRAN_INCOMING_NOTES_=true", + "_R_CHECK_RD_CONTENTS_KEYWORDS_=true", + "_R_CHECK_R_DEPENDS_=warn", + "_R_CHECK_THINGS_IN_TEMP_DIR_=true", + "_R_CHECK_BASHISMS_=true", + ## "_R_CHECK_XREFS_MIND_SUSPECT_ANCHORS_=true", + "_R_CHECK_URLS_SHOW_301_STATUS_=true", + "_R_CHECK_CODE_CLASS_IS_STRING_=true", + "_R_CHECK_NEWS_IN_PLAIN_TEXT_=true", + character()), + c(check_env_common, + ## + ## Remove eventually ... + "_R_CHECK_CRAN_INCOMING_=false", + ## + "_R_CHECK_CONNECTIONS_LEFT_OPEN_=false", + "_R_CHECK_THINGS_IN_TEMP_DIR_=false", + ## "_R_CHECK_XREFS_MIND_SUSPECT_ANCHORS_=false", + ## + character()) + ) + +if(!is.null(reverse)) + reverse$repos <- getOption("repos")["CRAN"] + +pfiles <- + tools::check_packages_in_dir(check_dir, + check_args = check_args, + check_args_db = check_args_db, + reverse = reverse, + xvfb = TRUE, + check_env = check_env, + Ncpus = Ncpus) + +if(length(pfiles)) { + writeLines("\nDepends:") + tools::summarize_check_packages_in_dir_depends(check_dir) + writeLines("\nTimings:") + tools::summarize_check_packages_in_dir_timings(check_dir) + writeLines("\nResults:") + tools::summarize_check_packages_in_dir_results(check_dir) +} + diff --git a/Documents/CRAN_scripts/CRAN_regular/check-R-ng b/Documents/CRAN_scripts/CRAN_regular/check-R-ng new file mode 100644 index 0000000..38e4de3 --- /dev/null +++ b/Documents/CRAN_scripts/CRAN_regular/check-R-ng @@ -0,0 +1,585 @@ +#! /bin/sh + +## Fully Qualified Domain Name of the system we use for checking. +FQDN=`hostname -f` + +## Default flavor to use. +R_flavor=r-devel +## Location of the CRAN mirror root on the local file system. +case ${FQDN} in + *.wu.ac.at) + CRAN_rsync=/srv/R/Repositories/CRAN ;; + *) + CRAN_rsync=/data/Repositories/CRAN ;; +esac +## Location of CRAN's src/contrib on the local file system. +CRAN_dir=${CRAN_rsync}/src/contrib +## Default check args. +check_args_defaults= +## Where everything happens. +check_dir=~/tmp/R.check +## Check date in ISO 8601 format: POSIX compatible according to +## . +check_date=`date "+%F"` +check_results_mail_recipients="Kurt.Hornik@wu.ac.at" +## case ${FQDN} in +## gimli.wu.ac.at) +## check_results_mail_recipients="Kurt.Hornik@wu.ac.at maechler@stat.math.ethz.ch Uwe.Ligges@R-project.org" +## ;; +## esac +check_results_files="summary.csv timings.csv details.csv" + +## Compilers to use. +## Use configure defaults (gcc/g++/gfortran). 
+compilers= +## Note that (in particular to achieve additional C++ strictness) we +## prefer to run checks with +## CFLAGS = -g -O2 -Wall -pedantic +## CXXFLAGS = -g -O2 -Wall -pedantic +## but use machine-local ~/.R/Makevars for setting this. +## Additional command line arguments to configure. +blas="--with-blas=no" +## +## As of 2019-04, tried to avoid gfortran-8 problems/bugs by using the +## system BLAS/LAPACK libraries: +## configure_args="--with-blas=yes --with-lapack=yes --enable-R-shlib LIBnn=lib" +## Note that this also needs OPENBLAS_NUM_THREADS=1. +## In case this still causes troubles, switch back to +configure_args="--enable-R-shlib DEFS='-D_FORTIFY_SOURCE=2' LIBnn=lib" +## with the problems still not fixed ... +## +## Number of jobs/cores to use. +n_jobs=1 +## Mechanism to parallelize package checking. +check_packages_via_parallel_make=no +## Build R and manuals? (Empty string corresponds to no.) +build_R=yes + +## R scripts directory. +R_scripts_dir=~/lib/R/Scripts +## Shell scripts directory. +sh_scripts_dir=~/lib/bash + +suffix=gcc + +## Command line args. +while test -n "${1}"; do + case "${1}" in + -f) + R_flavor=${2} + shift + case "${R_flavor}" in + local=*) + R_source_path=`expr "${R_flavor}" : "^local=\(.*\)"` + R_flavor=local + ;; + esac + ;; + -j) + n_jobs=${2} + shift + ;; + -m) + check_packages_via_parallel_make=yes + ;; + -b) + blas="--with-blas=yes --with-lapack=yes" + ;; + -c) + flavor=${2} + case "${flavor}" in + g*) + v=`expr "${flavor}" : ".*/\\(.*\\)"` + gcc_series_is_6=false + case "${v}" in + 6) + gcc_series_is_6=true ;; + esac + test -n "${v}" && v="-${v}" + ## Breaks many packages: + ## CCSTD="-std=c11" + CCSTD= + ## CXXSTD="-std=c++98" + ## CXXSTD= + ## if test "${gcc_series_is_6}" = "true"; then + ## CXXSTD="-std=c++11" + ## fi + CC="gcc${v}" + ## CC="\"gcc${v} ${CCSTD}\"" + CXX="g++${v}" + ## CXX="\"g++${v} ${CXXSTD}\"" + compilers="CC=${CC} CXX=${CXX} F77=gfortran${v} FC=gfortran${v} OBJC=gcc${v} OBJCXX=gcc${v}" + ## compilers="CC=${CC} CXX=${CXX} CXXSTD=${CXXSTD} F77=gfortran${v} + ## FC=gfortran${v} OBJC=gcc${v} OBJCXX=gcc${v}" + ## if test "${gcc_series_is_6}" = "true"; then + ## compilers="${compilers} CXX1XSTD=\"-std=c++11\"" + ## fi + ;; + c*) + suffix=clang + vc=`expr "${flavor}" : "[^/]*/\\([^/]*\\)\\(/.*\\)\\?"` + vg=`expr "${flavor}" : "[^/]*/[^/]*/\\(.*\\)"` + test -n "${vc}" && vc="-${vc}" + test -n "${vg}" && vg="-${vg}" + ## CXXSTD="-std=c++98" + CC="clang${vc}" + CXX="clang++${vc}" + ## CXX="\"clang++${vc} -std=c++11\"" + compilers="CC=${CC} CXX=${CXX} F77=gfortran${vg} FC=gfortran${vg} OBJC=gcc${vg} OBJCXX=gcc${vg}" + ## compilers="CC=${CC} CXX=${CXX} CXXSTD=${CXXSTD} F77=gfortran${vg} + ## FC=gfortran${vg} OBJC=gcc${vg} OBJCXX=gcc${vg}" + ## ## + ## ## Added 2018-04-03 to avoid compilation errors for + ## ## RcppArmadillo: remove eventually ...? 
+ ## CXX11STD="-std=c++11" + ## CXX14STD="-std=c++14" + ## compilers="${compilers} CXX11STD=${CXX11STD} CXX14STD=${CXX14STD}" + ## ## + ;; + esac + shift + ;; + -a) + configure_args="${configure_args} ${2}" + shift + ;; + esac + shift +done + +configure_args="${configure_args} ${blas}" + +case "${R_flavor}" in + r-devel) + R_source_url=https://cran.r-project.org/src/base-prerelease/R-devel.tar.gz + ## R_source_url=https://stat.ethz.ch/R/daily/R-devel.tar.gz + ;; + r-patched) + R_source_url=https://cran.r-project.org/src/base-prerelease/R-latest.tar.gz + ## R_source_url=https://stat.ethz.ch/R/daily/R-patched.tar.gz + ;; + r-prerel) + R_source_url=https://cran.r-project.org/src/base-prerelease/R-latest.tar.gz + ;; + r-release) + R_source_url=https://cran.r-project.org/src/base/R-latest.tar.gz + ;; +esac + +check_flavor="${R_flavor}-${suffix}" + +case ${FQDN} in + gimli*.wu.ac.at) + case "${R_flavor}" in + r-devel) + check_results_mail_recipients="Kurt.Hornik@wu.ac.at Uwe.Ligges@R-project.org maechler@stat.math.ethz.ch" + ;; + r-patched|r-prerel) + check_results_mail_recipients="Kurt.Hornik@wu.ac.at maechler@stat.math.ethz.ch" + ;; + r-release) + check_results_mail_recipients="Kurt.Hornik@wu.ac.at Uwe.Ligges@R-project.org" + ;; + esac + ;; +esac + +## No process is allowed more than 20 minutes +ulimit -t 1200 + +## Apparently needed when using bindfs to obtain a read-only mounted +## user library for checking: +ulimit -n 2048 + +## Use a bit more max stack size than used by default. +ulimit -s 16384 + +## +## No longer necessary with Rmpi 0.6-7 as of 2018-04-11: +## remove eventually. +## ## Customize distributed computing environment(s). +## RMPI_TYPE="OPENMPI" +## RMPI_INCLUDE="/usr/lib/x86_64-linux-gnu/openmpi/include" +## RMPI_LIB_PATH="/usr/lib/x86_64-linux-gnu/openmpi/lib" +## ## +## ## With the above, Rmpi should now use OpenMPI instead of LAM, and hence +## ## no longer call lamboot: so setting LAM_MPI_SESSION_SUFFIX and calling +## ## lamwipe should no longer be necessary. +## ## ## Running Rmpi calls lamboot so that at the end we should clean up by +## ## ## calling lamwipe. Of course, the LAM RTE should be check process +## ## ## specific, which can be accomplished via LAM_MPI_SESSION_SUFFIX. +## ## export LAM_MPI_SESSION_SUFFIX=${check_flavor} +## ## Remove eventually ... +## ## +## + +## Customize R +## +## if test "${check_flavor}" != "r-devel-gcc"; then +export R_BROWSER=false +export R_PDFVIEWER=false +## fi +## +## Try using a UTF-8 locale. +## export LANG="en_US.UTF-8" +export LANG="C.UTF-8" +## But not for sorting ... +export LC_COLLATE=C +export LANGUAGE="en@quot" +export R_PARALLEL_PORT=random +## Avoid hyperref problems with paper size 'letter'. +export R_PAPERSIZE=a4 +## Documented to be true in R-ints, but apparently not always. +export _R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_=true + +## +## Setting this in check_CRAN_regular.R should be good enough ...? +## export R_GC_MEM_GROW=2 +## + +## +## Setting this in check_CRAN_regular.R should be good enough ...? +## export OMP_NUM_THREADS=4 +## export OMP_THREAD_LIMIT=4 +## export RCPP_PARALLEL_NUM_THREADS=4 +## + +## +## Setting this in check_CRAN_regular.R should be good enough ...? +## export _R_S3_METHOD_LOOKUP_BASEENV_AFTER_GLOBALENV_=true +## + +## Use r-devel-clang to record S3 method search path lookups. 
+if test "${check_flavor}" = "r-devel-clang"; then + export _R_S3_METHOD_LOOKUP_REPORT_SEARCH_PATH_USES_=true +fi + +## +if test "${check_flavor}" = "r-devel-clang"; then + export LANG=en_US.iso885915 +fi +## + +## if test "${check_flavor}" = "r-devel-clang"; then +## export _R_CHECK_LENGTH_1_CONDITION_="package:_R_CHECK_PACKAGE_NAME_,verbose" +## fi + +## +## Setting this in check_CRAN_regular.R should be good enough ...? +## export _R_CHECK_INSTALL_DEPENDS_=true +## + +## +## Default was 100, changed in c73545 for R 3.5. +## Remove eventually ... +## export R_MAX_NUM_DLLS=123 +## + +## +## Set if necessary ... +## export R_BIOC_VERSION=3.6 +## + +user=${USER-${LOGNAME}} +export R_USER_DATA_DIR="/tmp/check-CRAN-regular-${user}/data" +export R_USER_CACHE_DIR="/tmp/check-CRAN-regular-${user}/cache" +export R_USER_CONFIG_DIR="/tmp/check-CRAN-regular-${user}/config" + +## Create check dir if needed. +test -d ${check_dir} || mkdir ${check_dir} || exit 1 +cd ${check_dir} +## Structure inside ${check_dir}: subdirectories for each flavor. +## Within a flavor subdirectory, most of the work happens in 'Work'. +## Inside this, R sources are in 'src', R is built in 'build', and +## packages are in 'PKGS'. When done, 'PKGS' is moved up for mirroring, +## and results are saved in 'Results/${check_date}'. +test -d ${check_flavor} || mkdir ${check_flavor} || exit 1 +cd ${check_flavor} +## +## We used to do: +## ## If there is an old Xvfb/check process remaining, kill it: +## test -f Xvfb.pid && kill -9 `cat Xvfb.pid` +## test -f check.pid && kill -9 `cat check.pid` +## But perhaps better to abort and investigate ...? +if test -f check.pid; then + echo "Old check process still running ... aborting." | \ + env from=Kurt.Hornik@wu.ac.at replyto=Kurt.Hornik@R-project.org \ + REPLYTO=Kurt.Hornik@R-project.org \ + mail -s "[CRAN-check-ng] ${check_flavor}/`hostname` FAILURE" \ + -r Kurt.Hornik@wu.ac.at \ + Kurt.Hornik@R-project.org + exit 1 +fi + +## Record check pid. +echo ${$} > check.pid +## Start a virtual framebuffer X server and use this for DISPLAY so that +## we can run package tcltk and friends. We use the PID of the check +## process as the server number so that the checks for different flavors +## get different servers. +PATH=${HOME}/bin:/usr/local/bin:${PATH} +Xvfb :${$} -screen 0 1280x1024x24 >/dev/null 2>&1 & +echo ${!} > Xvfb.pid +export DISPLAY=:${$} + +## +## Shouldn't this shut down Xvfb as well and remove its pid file? +do_cleanup_and_exit () { +## +## This should no longer be necessary ... +## lamwipe -sessionsuffix ${check_flavor} || true +## Remove eventually. +## + kill -9 `cat "${check_dir}/${check_flavor}/Xvfb.pid"` 2>/dev/null && \ + rm -f "${check_dir}/${check_flavor}/Xvfb.pid" + rm -f "${check_dir}/${check_flavor}/check.pid" + ## These get populated by the check runs ... + rm -rf ~/.cache/fontconfig + rm -rf ~/.cache/pocl + exit ${1-0} +} +## + +export _CHECK_CRAN_REGULAR_LIBRARY_DIR_="${check_dir}/${check_flavor}/Work/build/Packages" +## Unmounting the read-only remount of the library dir from the previous +## check run may have failed (e.g., due to leftover processes keeping +## the library dir busy). If so, try unmounting once more, and if this +## fails again, report the offending processes. +## Note that we currently also do a bind remount for ${check_dir} to +## /srv/rsync/R.check, so that mount will report the library dir mount +## with a path starting with /src/rsync/R.check. 
+(mount | grep -Fq "${check_flavor}/Work/build/Packages") && \ + umount "${_CHECK_CRAN_REGULAR_LIBRARY_DIR_}" || \ + (fuser "${_CHECK_CRAN_REGULAR_LIBRARY_DIR_}" && \ + do_cleanup_and_exit 1) + +test -d Work || mkdir Work || do_cleanup_and_exit 1 +cd Work + +if test -n "${build_R}"; then +## Update ${R_flavor} sources. +## Actually, we should check whether flavor of source and target agree. +test -d src || mkdir src || do_cleanup_and_exit 1 +## Argh, rsync is gone (at least for the time being ...). +## We could of course use svn checkout on https://svn.R-project.org/R, +## but how can one get "r-patched" and "r-release" without knowing the +## corresponding branch? Hence, we get things from CRAN (release) or +## ETHZ, but need to figure out the top-level source dir for the +## unpackaged version somehow (of course, we could also read this from +## the archive). +## (cd src; rsync -rC -t --delete rsync.r-project.org::${R_flavor} .) +## +## Maybe we should use svn checkout for r-devel? +## +mv src src.save +(mkdir tmp && + cd tmp && + touch stamp && + (if test "${R_flavor}" = "local"; then + tar zxmf "${R_source_path}" + else + wget -O - --retr-symlinks ${R_source_url} | tar zxmf - + fi) && + entry=`find . -mindepth 1 -maxdepth 1 -newer stamp -type d` && + mv ${entry} ../src && + cd .. && + rm -rf src.save tmp) || (rm -rf tmp; mv src.save src) + +## Link recommended packages. +(cd src; \ + CRAN_RSYNC="${CRAN_rsync}" ./tools/rsync-recommended) + +## Rebuild R. +rm -rf build +mkdir build +(cd build && + eval ../src/configure ${configure_args} ${compilers}) || \ + do_cleanup_and_exit 1 +## Try to avoid hard-wiring top-level CRAN master URLs in HTML hrefs +## from the Texinfo manuals. +if test -f "/usr/share/texinfo/htmlxref.cnf"; then + (echo "R = ."; + cat "/usr/share/texinfo/htmlxref.cnf" | grep '^ R-') > \ + build/doc/manual/htmlxref.cnf +fi +(cd build && + make -j `echo "${n_jobs}" | cut -f1 -d/` && + make check && + make pdf) || \ + do_cleanup_and_exit 1 +(cd build/doc/manual && + make fullrefman.pdf) || \ + do_cleanup_and_exit 1 +(cd build/doc && + make docs2) || \ + do_cleanup_and_exit 1 +(cd build/doc/manual && + make epub) +fi + +mkdir -p build/Packages + +if test -f ./build/bin/R; then + R_HOME=`./build/bin/R RHOME` +else + R_HOME=`R RHOME` +fi +R_exe="${R_HOME}/bin/R" + +## Packages. +rm -rf PKGS # In case there are some leftovers ... +mkdir PKGS +cd PKGS + +## Check profile and environ settings. +export R_PROFILE_USER="${HOME}/.R/check_CRAN_regular.Rprofile" +## export R_CHECK_ENVIRON="${HOME}/.R/check_CRAN_regular.Renviron" +export R_MAKEVARS_USER="${HOME}/.R/Makevars-${suffix}" + +test -d "${HOME}/tmp/scratch" && export TMPDIR="${HOME}/tmp/scratch" + +## Pass over to R for installation and checking and summaries ... +${R_HOME}/bin/Rscript ${R_scripts_dir}/check_CRAN_regular.R \ + -j ${n_jobs} -m ${check_packages_via_parallel_make} + +## Wrap up. +cd ${check_dir}/${check_flavor} + +## Rotate old check results files. +for f in ${check_results_files} details.rds ; do + test -f "${f}.prev" && rm -f "${f}.prev" + test -f "${f}" && mv "${f}" "${f}.prev" +done +## Rotate old check results. +## +## Remove the chmod -R eventually ... +test -d PKGS.prev && chmod -R u+w PKGS.prev && rm -rf PKGS.prev +test -d PKGS && chmod -R u+w PKGS && mv PKGS PKGS.prev +## +## Move new check results up from Work. +mv Work/PKGS PKGS +chmod -R u+w PKGS +## Move new check results files up from PKGS. +for f in ${check_results_files} details.rds ; do + mv PKGS/"${f}" . +done +## Save new check results files. 
+for d in Results Results/${check_date}; do + test -d ${d} || mkdir ${d} || do_cleanup_and_exit 1 +done +for f in ${check_results_files}; do + cp "${f}" "Results/${check_date}" +done + +## And notify of differences ... +if test -f "summary.csv.prev"; then + diff "summary.csv.prev" "summary.csv" > "summary.csv.diff" + test -s "summary.csv.diff" || rm -f "summary.csv.diff" +fi +if test -f "summary.csv.diff"; then + echo "source(\"${R_scripts_dir}/check.R\"); \ + write_check_summary_diffs_to_con(\".\", \"summary.csv.diff\")" | \ + ${R_exe} --vanilla --slave + env from=Kurt.Hornik@wu.ac.at replyto=Kurt.Hornik@R-project.org \ + REPLYTO=Kurt.Hornik@R-project.org \ + mail -s "[CRAN-check-ng] ${check_flavor}/`hostname` summary.csv changes on `date '+%FT%T%z'`" \ + -r Kurt.Hornik@wu.ac.at \ + ${check_results_mail_recipients} < "summary.csv.diff" + rm -f "summary.csv.diff" +fi + +if test -f "details.csv.prev"; then + diff "details.csv.prev" "details.csv" > "details.csv.diff" + test -s "details.csv.diff" || rm -f "details.csv.diff" +fi +if test -f "details.csv.diff"; then + echo "source(\"${R_scripts_dir}/check.R\"); \ + flavor <- check_flavors_map[\"${check_flavor}\"]; \ + write_check_details_diffs_to_con(\".\", \"details.csv.diff\", flavor)" | \ + ${R_exe} --vanilla --slave + env from=Kurt.Hornik@wu.ac.at replyto=Kurt.Hornik@R-project.org \ + REPLYTO=Kurt.Hornik@R-project.org \ + mail -s "[CRAN-check-ng] ${check_flavor}/`hostname` details.csv changes on `date '+%FT%T%z'`" \ + -r Kurt.Hornik@wu.ac.at \ + ${check_results_mail_recipients} < "details.csv.diff" + rm -f "details.csv.diff" + echo "source(\"${R_scripts_dir}/check.R\"); \ + write_check_details_for_new_problems_to_con(\".\", \"details.txt\")" | \ + ${R_exe} --vanilla --slave + test -s "details.txt" && \ + env from=Kurt.Hornik@wu.ac.at replyto=Kurt.Hornik@R-project.org \ + REPLYTO=Kurt.Hornik@R-project.org \ + mail -s "[CRAN-check-ng] ${check_flavor}/`hostname` new problems on `date '+%FT%T%z'`" \ + -r Kurt.Hornik@wu.ac.at \ + ${check_results_mail_recipients} < "details.txt" + rm -f "details.txt" +fi + +## Manuals + +if test -n "${build_R}"; then +test -d Manuals.prev && rm -rf Manuals.prev +test -d Manuals && mv Manuals Manuals.prev +mkdir Manuals +## +## Change back to copying when 3.2.0 is out. +## cp Work/build/doc/manual/*.html Manuals +for f in Work/build/doc/manual/*.html; do + grep -v '="dir.html#Top"' ${f} > Manuals/`basename ${f}` +done +## +cp Work/build/doc/manual/*.pdf Manuals +## It would be better to have a single R.css and logo.jpg/Rlogo.svg, and +## fix NEWS.html accordingly. 
+cat Work/build/doc/html/NEWS.html | \ + sed 's/img src="[^"]*logo.jpg"/img src="logo.jpg"/' | \ + sed 's/img src="[^"]*Rlogo.svg"/img src="Rlogo.svg"/' \ + > Manuals/NEWS.html +cp Work/build/doc/html/R.css Manuals +cp Work/build/doc/html/logo.jpg Manuals +cp Work/build/doc/html/Rlogo.svg Manuals +cp Work/build/doc/NEWS*.pdf Manuals +cat Work/build/doc/html/NEWS.2.html | \ + sed 's/img src="[^"]*Rlogo.svg"/img src="Rlogo.svg"/' \ + > Manuals/NEWS.2.html +cat Work/build/doc/html/NEWS.3.html | \ + sed 's/img src="[^"]*Rlogo.svg"/img src="Rlogo.svg"/' \ + > Manuals/NEWS.3.html +cp Work/build/doc/manual/*.epub Manuals +mkdir Manuals/images +cp Work/build/doc/manual/images/*.png Manuals/images +fi + +## Refmans in HTML (if available) +if test -f Work/build/library/base/html/mean.html; then + test -d Refmans.prev && rm -rf Refmans.prev + test -d Refmans && mv Refmans Refmans.prev + mkdir -p Refmans/base + for f in Work/build/library/*/DESCRIPTION; do + grep -q "^Priority: base" ${f} || continue + d=`dirname ${f}` + p=`basename ${d}` + mkdir -p Refmans/base/${p} + cp ${d}/DESCRIPTION Refmans/base/${p} + cp -r ${d}/help Refmans/base/${p} + cp -r ${d}/html Refmans/base/${p} + done + mkdir -p Refmans/CRAN + for f in Work/build/Packages/*/DESCRIPTION; do + grep -q "^Repository: CRAN" ${f} || continue + d=`dirname ${f}` + p=`basename ${d}` + mkdir -p Refmans/CRAN/${p} + mv ${d}/help Refmans/CRAN/${p} + mv ${d}/html Refmans/CRAN/${p} + done +fi + + +do_cleanup_and_exit + +### Local Variables: *** +### mode: sh *** +### sh-basic-offset: 2 *** +### End: *** + diff --git a/Documents/CRAN_scripts/CRAN_regular/check.Renviron b/Documents/CRAN_scripts/CRAN_regular/check.Renviron new file mode 100644 index 0000000..021a88c --- /dev/null +++ b/Documents/CRAN_scripts/CRAN_regular/check.Renviron @@ -0,0 +1,95 @@ +## KH's personal ~/.R/check.Renviron. +## Last updated on 2022-08-10 + +### Defaults for '--as-cran": commented out where not appropriate for +### all KH checks. 
+_R_CHECK_AUTOCONF_=true +## _R_CHECK_BASHISMS_=true +_R_CHECK_BOGUS_RETURN_=true +_R_CHECK_CODE_USAGE_VIA_NAMESPACES_=true +_R_CHECK_CODE_USAGE_WITH_ONLY_BASE_ATTACHED_=true +## _R_CHECK_CODOC_VARIABLES_IN_USAGES_=true +_R_CHECK_COMPILATION_FLAGS_=true +## _R_CHECK_CONNECTIONS_LEFT_OPEN_=true +_R_CHECK_DEPENDS_ONLY_DATA_=true +## _R_CHECK_DONTTEST_EXAMPLES_=true +_R_CHECK_DOT_FIRSTLIB_=true +_R_CHECK_FF_AS_CRAN_=true +_R_CHECK_FUTURE_FILE_TIMESTAMPS_=true +_R_CHECK_INSTALL_DEPENDS_=true +_R_CHECK_LIMIT_CORES_=true +_R_CHECK_MATRIX_DATA_=true +## _R_CHECK_NATIVE_ROUTINE_REGISTRATION_=true +_R_CHECK_NO_RECOMMENDED_=true +_R_CHECK_NO_STOP_ON_TEST_ERROR_=true +## _R_CHECK_ORPHANED_=true +_R_CHECK_OVERWRITE_REGISTERED_S3_METHODS_=true +_R_CHECK_PACKAGE_DATASETS_SUPPRESS_NOTES_=true +## _R_CHECK_PACKAGES_USED_CRAN_INCOMING_NOTES_=true +_R_CHECK_PACKAGES_USED_IGNORE_UNUSED_IMPORTS_=true +_R_CHECK_PACKAGES_USED_IN_TESTS_USE_SUBDIRS_=true +_R_CHECK_PRAGMAS_=true +## _R_CHECK_R_DEPENDS_=warn +_R_CHECK_R_ON_PATH_=true +_R_CHECK_RD_VALIDATE_RD2HTML_=${_R_CHECK_RD_VALIDATE_RD2HTML_-true} +## _R_CHECK_RD_CONTENTS_KEYWORDS_=true +_R_CHECK_SCREEN_DEVICE_=stop +_R_CHECK_S3_METHODS_NOT_REGISTERED_=true +_R_CHECK_SHLIB_OPENMP_FLAGS_=true +_R_CHECK_TIMINGS_=10 +_R_CHECK_THINGS_IN_CHECK_DIR_=true +## _R_CHECK_THINGS_IN_TEMP_DIR_=true +_R_CHECK_VIGNETTE_TITLES_=true +## _R_CHECK_XREFS_PKGS_ARE_DECLARED_=true +## _R_CHECK_XREFS_MIND_SUSPECT_ANCHORS_=true +_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_=true + +_R_OPTIONS_STRINGS_AS_FACTORS_=${_R_OPTIONS_STRINGS_AS_FACTORS_-false} + +### Additional settings used for all KH checks. +_R_CHECK_ALWAYS_LOG_VIGNETTE_OUTPUT_=true +_R_CHECK_CODE_ASSIGN_TO_GLOBALENV_=true +_R_CHECK_CODE_ATTACH_=true +_R_CHECK_CODE_DATA_INTO_GLOBALENV_=true +_R_CHECK_CODETOOLS_PROFILE_="suppressPartialMatchArgs=false" +_R_CHECK_CRAN_INCOMING_CHECK_URLS_IN_PARALLEL_=true +_R_CHECK_DEPRECATED_DEFUNCT_=true +_R_CHECK_DOC_SIZES2_=true +_R_CHECK_DOTCODE_RETVAL_=true +_R_CHECK_EXECUTABLES_EXCLUSIONS_=false +## _R_CHECK_FF_CALLS_=registration +_R_CHECK_FUTURE_FILE_TIMESTAMPS_LEEWAY_=6h +_R_GC_FAIL_ON_ERROR_=true +_R_CHECK_LENGTH_1_CONDITION_="package:_R_CHECK_PACKAGE_NAME_,verbose" +## _R_CHECK_LENGTH_1_CONDITION_=${_R_CHECK_LENGTH_1_CONDITION_-warn} +## _R_CHECK_LENGTH_1_LOGIC2_="package:_R_CHECK_PACKAGE_NAME_,verbose" +_R_CHECK_OVERWRITE_REGISTERED_S3_METHODS_=true +_R_CHECK_PACKAGE_DATASETS_SUPPRESS_NOTES_=true +_R_CHECK_PKG_SIZES_=false +_R_CHECK_R_DEPENDS_=true +_R_CHECK_RD_LINE_WIDTHS_=true +_R_CHECK_RD_MATH_RENDERING_=true +_R_CHECK_REPLACING_IMPORTS_=true +_R_CHECK_SERIALIZATION_=true +_R_CHECK_SRC_MINUS_W_IMPLICIT_=true +_R_CHECK_SUGGESTS_ONLY_=${_R_CHECK_SUGGESTS_ONLY_-true} +_R_CHECK_SYSTEM_CLOCK_=false +_R_CHECK_THINGS_IN_TEMP_DIR_EXCLUDE_="^(ompi|pulse|runtime-)" +_R_CHECK_TOPLEVEL_FILES_=true +_R_CHECK_VC_DIRS_=true +_R_CHECK_VIGNETTES_SKIP_RUN_MAYBE_=true +_R_CHECK_XREFS_USE_ALIASES_FROM_CRAN_=true +## Outputs +_R_CHECK_TESTS_NLINES_=0 +_R_CHECK_VIGNETTES_NLINES_=10000 +## Timings +_R_CHECK_TIMINGS_=0 +_R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_=2.5 +_R_CHECK_TEST_TIMING_=yes +_R_CHECK_TEST_TIMING_CPU_TO_ELAPSED_THRESHOLD_=2.5 +_R_CHECK_VIGNETTE_TIMING_=yes +_R_CHECK_VIGNETTE_TIMING_CPU_TO_ELAPSED_THRESHOLD_=2.5 + +## FIXME: remove eventually ... 
+## _R_CLASS_MATRIX_ARRAY_=${_R_CLASS_MATRIX_ARRAY_-true} + diff --git a/Documents/CRAN_scripts/CRAN_regular/check_CRAN_regular.R b/Documents/CRAN_scripts/CRAN_regular/check_CRAN_regular.R new file mode 100644 index 0000000..a00c835 --- /dev/null +++ b/Documents/CRAN_scripts/CRAN_regular/check_CRAN_regular.R @@ -0,0 +1,743 @@ +R_scripts_dir <- normalizePath(file.path("~", "lib", "R", "Scripts")) + +## Set as needed. +Ncpus_i <- Ncpus_c <- 1 +## Set as needed. +check_repository_root <- "/srv/R/Repositories" +## Set as needed. +check_packages_via_parallel_make <- "no" +## Set as needed. +libdir <- Sys.getenv("_CHECK_CRAN_REGULAR_LIBRARY_DIR_", + file.path(R.home(), "Packages")) +## Set as needed. +env_session_time_limits <- character() +## +## This used to be +## c("R_SESSION_TIME_LIMIT_CPU=600", +## "R_SESSION_TIME_LIMIT_ELAPSED=1800") +## + +xvfb_run <- "xvfb-run -a --server-args=\"-screen 0 1280x1024x24\"" + +if(dir.exists(path <- file.path(normalizePath("~"), "tmp", "scratch"))) + Sys.setenv("TMPDIR" = path) + +Sys.setenv("R_GC_MEM_GROW" = "2") + +## +## Need OMP thread limit as 3 instead of 4 when using OpenBLAS. +Sys.setenv("OMP_NUM_THREADS" = 3, # 4? + "OMP_THREAD_LIMIT" = 3, # 4? + "RCPP_PARALLEL_NUM_THREADS" = 4, + "POCL_KERNEL_CACHE" = 0, + "OMPI_MCA_btl_base_warn_component_unused" = 0 + ) +## Or maybe instead just +Sys.setenv("OPENBLAS_NUM_THREADS" = 1) +## ??? +## + +Sys.setenv("_R_CHECK_FORCE_SUGGESTS_" = "false", + "_R_CHECK_SUGGESTS_ONLY_" = "true") + +Sys.setenv("_R_CHECK_SCREEN_DEVICE_" = "warn", + "_R_CHECK_SUPPRESS_RANDR_MESSAGE_" = "true") + +## For experimenting only ... +if(Sys.getenv("_R_S3_METHOD_LOOKUP_REPORT_SEARCH_PATH_USES_") == + "true") { + Sys.setenv("_R_S3_METHOD_LOOKUP_BASEENV_AFTER_GLOBALENV_" = "false") +} else { + Sys.setenv("_R_S3_METHOD_LOOKUP_BASEENV_AFTER_GLOBALENV_" = "true") +} + +## For experimenting only ... +Sys.setenv("_R_BIND_S3_DISPATCH_FORCE_IDENTICAL_METHODS_" = "false") + +## +## This is set in the check environment file used, but the load check +## really happens at install time, hence needs special treatment for +## two-stage installs ... +Sys.setenv("_R_CHECK_INSTALL_DEPENDS_" = "true") +## + +## +## To run checks in parallel using mclapply and more than 2 cores, +## we may need something like +## Sys.setenv("_R_CHECK_LIMIT_CORES_" = "false") +## Currently not needed as we parallize via Make. +## + +## +## Remove eventually ... +## Sys.setenv("_R_S3_METHOD_REGISTRATION_NOTE_OVERWRITES_" = "true") +## + +## +## Remove eventually ... +## Sys.setenv("_R_S3_METHOD_LOOKUP_USE_TOPENV_AS_DEFENV_" = "true") +## + +## +## Remove eventually ... +Sys.setenv("_R_STOP_ON_XTFRM_DATA_FRAME_" = "true") +## + +wrkdir <- getwd() + +if(!interactive()) { + ## Command line handling. + args <- commandArgs(trailingOnly = TRUE) + pos <- which(args == "-j") + if(length(pos)) { + jobs <- args[pos + 1L] + if(grepl("/", jobs)) { + Ncpus_i <- as.integer(sub("/.*", "", jobs)) + Ncpus_c <- as.integer(sub(".*/", "", jobs)) + } else + Ncpus_i <- Ncpus_c <- as.integer(jobs) + args <- args[-c(pos, pos + 1L)] + } + pos <- which(args == "-m") + if(length(pos)) { + check_packages_via_parallel_make <- args[pos + 1L] + args <- args[-c(pos, pos + 1L)] + } + ## That's all for now ... + ## + ## Could also add a command line argument for setting + ## check_repository_root. 
+ ## +} + +check_packages_via_parallel_make <- + tolower(check_packages_via_parallel_make) %in% c("1", "yes", "true") + +## Compute repository URLs to be used as repos option for checking, +## assuming local CRAN and BioC mirrors rooted at dir. +## Local Omegahat mirrors via rsync are no longer possible. +check_repository_URLs <- +function(dir) +{ + ## Could make this settable to smooth transitions ... + BioC_version <- + if(is.function(tools:::.BioC_version_associated_with_R_version)) { + tools:::.BioC_version_associated_with_R_version() + } else { + tools:::.BioC_version_associated_with_R_version + } + BioC_names <- c("BioCsoft", "BioCann", "BioCexp") + BioC_paths <- c("bioc", "data/annotation", "data/experiment") + + ## Assume that all needed src/contrib directories really exist. + repos <- sprintf("file://%s/%s", + normalizePath(dir), + c("CRAN", + file.path("Bioconductor", + BioC_version, + BioC_paths))) + names(repos) <- c("CRAN", BioC_names) + ## To add Omegahat: + ## repos <- c(repos, Omegahat = "http://www.omegahat.net/R") + repos +} + +format_timings_from_ts0_and_ts1 <- +function(dir) +{ + ts0 <- Sys.glob(file.path(dir, "*.ts0")) + ts1 <- Sys.glob(file.path(dir, "*.ts1")) + ## These should really have the same length, but who knows. + mt0 <- file.mtime(ts0) + mt1 <- file.mtime(ts1) + timings <- + merge(data.frame(Package = sub("\\.ts0$", "", basename(ts0)), + mt0 = mt0, stringsAsFactors = FALSE), + data.frame(Package = sub("\\.ts1$", "", basename(ts1)), + mt1 = mt1, stringsAsFactors = FALSE)) + sprintf("%s %f", timings$Package, timings$mt1 - timings$mt0) +} + +format_timings_from_ts2 <- +function(dir, pnames = NULL) +{ + if(is.null(pnames)) + ts2 <- Sys.glob(file.path(dir, "*.ts2")) + else { + ts2 <- file.path(dir, paste0(pnames, ".ts2")) + ts2 <- ts2[file.exists(ts2)] + } + sprintf("%s %f", + sub("\\.ts2$", "", basename(ts2)), + unlist(lapply(ts2, + get_CPU_seconds_used_from_time_output_file))) +} + +get_CPU_seconds_used_from_time_output_file <- +function(f) { + x <- readLines(f, warn = FALSE) + p <- "(.*)user (.*)system" + x <- x[grepl(p, x)][1L] + if(is.na(x)) + return(0) + m <- regexec(p, x) + y <- regmatches(x, m)[[1L]][-1L] + sum(vapply(parse(text = sub(":", "*60+", y)), eval, 0)) +} + +install_packages_with_timings <- +function(pnames, available, libdir, Ncpus = 1) +{ + ## If we only wanted to copy the CRAN install logs, we could record + ## the ones needed here, e.g. via + ## ilogs <- paste0(pnames, "_i.out") + + ## Use make -j for this. + + tmpd <- tempfile() + dir.create(tmpd) + conn <- file(file.path(tmpd, "Makefile"), "wt") + + ## Want to install the given packages and their available + ## dependencies including Suggests. + pdepends <- tools::package_dependencies(pnames, available, + which = "most") + pnames <- unique(c(pnames, + intersect(unlist(pdepends[pnames], + use.names = FALSE), + rownames(available)))) + ## Need to install these and their recursive dependencies. + pdepends <- tools::package_dependencies(rownames(available), + available, + recursive = TRUE) + ## Could also use utils:::.make_dependency_list(), which is a bit + ## faster (if recursive = TRUE, this drops base packages). + pnames <- unique(c(pnames, + intersect(unlist(pdepends[pnames], + use.names = FALSE), + rownames(available)))) + ## Drop base packages from the dependencies. + pdepends <- lapply(pdepends, setdiff, + tools:::.get_standard_package_names()$base) + + ## Deal with remote dependencies (Omegahat these days ...) 
+ ind <- !startsWith(available[, "Repository"], "file://") + rpnames <- intersect(pnames, rownames(available)[ind]) + if(length(rpnames)) { + dir.create(file.path(tmpd, "Depends")) + rppaths <- available[rpnames, "Path"] + rpfiles <- file.path(tmpd, "Depends", basename(rppaths)) + for(i in seq_along(rpnames)) { + download.file(rppaths[i], rpfiles[i], quiet = TRUE) + } + available[rpnames, "Path"] <- rpfiles + } + + cmd0 <- sprintf("/usr/bin/env MAKEFLAGS= R_LIBS_USER=%s %s %s %s %s CMD INSTALL --pkglock", + shQuote(libdir), + paste(env_session_time_limits, collapse = " "), + xvfb_run, + paste(Sys.which("timeout"), + Sys.getenv("_R_INSTALL_PACKAGES_ELAPSED_TIMEOUT_", + "3600")), + shQuote(file.path(R.home("bin"), "R"))) + deps <- paste(paste0(pnames, ".ts1"), collapse = " ") + deps <- strwrap(deps, width = 75, exdent = 2) + deps <- paste(deps, collapse=" \\\n") + cat("all: ", deps, "\n", sep = "", file = conn) + verbose <- interactive() + for(p in pnames) { + cmd <- paste(cmd0, + available[p, "Iflags"], + shQuote(available[p, "Path"]), + ">", paste0(p, "_i.out"), + "2>&1") + deps <- pdepends[[p]] + deps <- if(length(deps)) + paste(paste0(deps, ".ts1"), collapse=" ") else "" + cat(paste0(p, ".ts1: ", deps), + if(verbose) { + sprintf("\t@echo begin installing package %s", + sQuote(p)) + }, + sprintf("\t@touch %s.ts0", p), + sprintf("\t@-/usr/bin/time -o %s.ts2 %s", p, cmd), + sprintf("\t@touch %s.ts1", p), + "", + sep = "\n", file = conn) + } + close(conn) + + cwd <- setwd(tmpd) + on.exit(setwd(cwd)) + + system2(Sys.getenv("MAKE", "make"), + c("-k -j", Ncpus)) + + ## Copy the install logs. + file.copy(Sys.glob("*_i.out"), cwd, copy.date = TRUE) + + ## This does not work: + ## cannot rename file ........ reason 'Invalid cross-device link' + ## ## Move the time stamps. + ## ts0 <- Sys.glob("*.ts0") + ## file.rename(ts0, + ## file.path(cwd, + ## sub("\\.ts0$", "", ts0), + ## ".install_timestamp")) + + ## Compute and return install timings. + ## timings <- format_timings_from_ts0_and_ts1(tmpd) + timings <- format_timings_from_ts2(tmpd) + + timings +} + +check_packages_with_timings <- +function(pnames, available, libdir, Ncpus = 1, make = FALSE) +{ + if(make) + check_packages_with_timings_via_make(pnames, available, + libdir, Ncpus) + else + check_packages_with_timings_via_fork(pnames, available, + libdir, Ncpus) +} + +check_packages_with_timings_via_fork <- +function(pnames, available, libdir, Ncpus = 1) +{ + ## Use mclapply() for this. + + verbose <- interactive() + + ## + timeout <- Sys.which("timeout") + tlim <- as.numeric(Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_", "1800")) + + do_one <- function(pname, available, libdir) { + if(verbose) message(sprintf("checking %s ...", pname)) + ## Do not use stdout/stderr ... 
+ if(!is.na(match("timeout", names(formals(system2))))) + system.time(system2(file.path(R.home("bin"), "R"), + c("CMD", "check", "--timings", + "-l", shQuote(libdir), + available[pname, "Cflags"], + pname), + stdout = FALSE, stderr = FALSE, + env = c(sprintf("R_LIBS_USER=%s", + shQuote(libdir)), + env_session_time_limits, + "_R_CHECK_LIMIT_CORES_=true"), + timeout = tlim)) + else + system.time(system2(timeout, + c(tlim, + file.path(R.home("bin"), "R"), + "CMD", "check", "--timings", + "-l", shQuote(libdir), + available[pname, "Cflags"], + pname), + stdout = FALSE, stderr = FALSE, + env = c(sprintf("R_LIBS_USER=%s", + shQuote(libdir)), + env_session_time_limits, + "_R_CHECK_LIMIT_CORES_=true") + )) + } + ## + + timings <- parallel::mclapply(pnames, do_one, available, + libdir, mc.cores = Ncpus) + timings <- sprintf("%s %f", pnames, sapply(timings, `[[`, 3L)) + + timings +} + +check_packages_with_timings_via_make <- +function(pnames, available, libdir, Ncpus = 1) +{ + verbose <- interactive() + + ## Write Makefile for parallel checking. + con <- file("Makefile", "wt") + ## Note that using $(shell) is not portable: + ## Alternatively, compute all sources from R and write them out. + ## Using + ## SOURCES = `ls *.in` + ## does not work ... + lines <- + c("SOURCES = $(shell ls *.in)", + "OBJECTS = $(SOURCES:.in=.ts1)", + ".SUFFIXES:", + ".SUFFIXES: .in .ts1", + "all: $(OBJECTS)", + ".in.ts1:", + if(verbose) + "\t@echo checking $* ...", + "\t@touch $*.ts0", + ## + ## Added temporarily to investigate leftover session dirs. + ## Remove/comment eventually. + "\t@ls /tmp > $*.ls0", + ## + ## + ## As of Nov 2013, the Xvfb started from check-R-ng keeps + ## crashing [not entirely sure what from]. + ## Hence, fall back to running R CMD check inside xvfb-run. + ## Should perhaps make doing so controllable ... + sprintf("\t@-/usr/bin/time -o $*.ts2 /usr/bin/env MAKEFLAGS= R_LIBS_USER=%s %s _R_CHECK_LIMIT_CORES_=true %s %s %s CMD check --timings -l %s $($*-cflags) $* >$*_c.out 2>&1", + shQuote(libdir), + paste(env_session_time_limits, collapse = " "), + xvfb_run, + paste(Sys.which("timeout"), + Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_", "1800")), + shQuote(file.path(R.home("bin"), "R")), + shQuote(libdir)), + ## + ## + ## Added temporarily to investigate leftover session dirs. + ## Remove/comment eventually. + "\t@ls /tmp > $*.ls1", + ## + "\t@touch $*.ts1", + sprintf("%s-cflags = %s", + pnames, + available[pnames, "Cflags"])) + writeLines(lines, con) + close(con) + + file.create(paste0(pnames, ".in")) + + system2(Sys.getenv("MAKE", "make"), + c("-k -j", Ncpus)) + + ## Compute check timings. + ## timings <- format_timings_from_ts0_and_ts1(getwd()) + timings <- format_timings_from_ts2(getwd(), pnames) + + ## Clean up (should this use wildcards?) + file.remove(c(paste0(pnames, ".in"), + paste0(pnames, ".ts0"), + paste0(pnames, ".ts1"), + "Makefile")) + + timings +} + +check_args_db_from_stoplist_sh <- +function() +{ + x <- system(". ~/lib/bash/check_R_stoplists.sh; set", intern = TRUE) + x <- grep("^check_args_db_", x, value = TRUE) + db <- sub("^check_args_db_([^=]*)=(.*)$", "\\2", x) + db <- sub("'(.*)'", "\\1", db) + names(db) <- + chartr("_", ".", sub("^check_args_db_([^=]*)=.*", "\\1", x)) + db +} + +## Compute available packages as used for CRAN checking: +## Use CRAN versions in preference to versions from other repositories +## (even if these have a higher version number) +## For now, also exclude packages according to OS requirement: to +## change, drop 'OS_type' from the list of filters below. 
+filters <- c("R_version", "OS_type", "CRAN", "duplicates") +repos <- check_repository_URLs(check_repository_root) +## Needed for CRAN filtering below. +options(repos = repos) +## Also pass this to the profile used for checking: +Sys.setenv("_CHECK_CRAN_REGULAR_REPOSITORIES_" = + paste(sprintf("%s=%s", names(repos), repos), collapse = ";")) + +curls <- contrib.url(repos) +available <- available.packages(contriburl = curls, filters = filters) +## Recommended packages require special treatment: the versions in the +## version specific CRAN subdirectories are not listed as available. So +## create the corresponding information from what is installed in the +## system library, and merge this in by removing duplicates (so that for +## recommended packages we check the highest "available" version, which +## for release/patched may be in the main package area). +installed <- installed.packages(lib.loc = .Library) +ind <- (installed[, "Priority"] == "recommended") +pos <- match(colnames(available), colnames(installed), nomatch = 0L) +nightmare <- matrix(NA_character_, sum(ind), ncol(available), + dimnames = list(installed[ind, "Package"], + colnames(available))) +nightmare[ , pos > 0] <- installed[ind, pos] +## Compute where the recommended packages came from. +## Could maybe get this as R_VERSION from the environment. +R_version <- sprintf("%s.%s", R.version$major, R.version$minor) +if(R.version$status == "Patched") + R_version <- sub("\\.[[:digit:]]*$", "-patched", R_version) +nightmare[, "Repository"] <- + file.path(repos["CRAN"], "src", "contrib", R_version, "Recommended") + +ind <- (!is.na(priority <- available[, "Priority"]) & + (priority == "recommended")) + +available <- + rbind(tools:::.remove_stale_dups(rbind(nightmare, available[ind, ])), + available[!ind, ]) + +## Make sure we have the most recent versions of the recommended +## packages in .Library. +update.packages(lib.loc = .Library, available = available, ask = FALSE) + +## Paths to package tarballs. +pfiles <- sub("^file://", "", + sprintf("%s/%s_%s.tar.gz", + available[, "Repository"], + available[, "Package"], + available[, "Version"])) +available <- cbind(available, Path = pfiles) + +## Unpack all CRAN packages to simplify checking via Make. +ind <- startsWith(available[, "Repository"], repos["CRAN"]) +## +## In principle we could also check the e.g. BioC (software) packages by +## (optionally) doing +## ind <- ind | startsWith(available[, "Repository"], +## repos["BioCsoft"]) +## +results <- + parallel::mclapply(pfiles[ind], + function(p) + system2("tar", c("zxf", p), + stdout = FALSE, stderr = FALSE), + mc.cores = Ncpus_i) +## +## * Earlier version also installed the CRAN packages from the unpacked +## sources, to save the resources of the additional unpacking when +## installing from the tarballs. This complicates checking (and made +## it necessary to use an .install_timestamp mechanism to identify +## files in the unpacked sources created by installation): hence, we +## no longer do so. +## * We could easily change check_packages_with_timings_via_fork() to +## use the package tarballs for checking: simply replace 'pname' by +## 'available[pname, "Path"]' in the call to R CMD check. +## For check_packages_with_timings_via_make(), we would need to change +## '$*' in the Make rule by something like $(*-path), and add these +## PNAME-path variables along the lines of adding the PNAME-cflags +## variables. +## + +## Add information on install and check flags. 
+## Keep things simple, assuming that the check args db entries are one +## of '--install=fake', '--install=no', or a combination of other +## arguments to be used for full installs. +check_args_db <- check_args_db_from_stoplist_sh() +pnames <- rownames(available)[ind] +pnames_using_install_no <- + intersect(names(check_args_db)[check_args_db == "--install=no"], + pnames) +pnames_using_install_fake <- + intersect(names(check_args_db)[check_args_db == "--install=fake"], + pnames) +pnames_using_install_full <- + setdiff(pnames, + c(pnames_using_install_no, pnames_using_install_fake)) +## For simplicity, use character vectors of install and check flags. +iflags <- character(length(pfiles)) +names(iflags) <- rownames(available) +cflags <- iflags +iflags[pnames_using_install_fake] <- "--fake" +## Packages using a full install are checked with '--install=check:OUT', +## where OUT is the full/fake install output file. +## +## Packages using a fake install are checked with '--install=fake'. +## Currently it is not possible to re-use the install output file, as we +## cannot give both --install=fake --install=check:OUT to R CMD check. +## However, in principle checking with --install=fake mostly only +## turns off the run time tests, so we check --install=fake packages +## with --install=check:OUT --no-examples --no-vignettes --no-tests. +cflags[pnames_using_install_no] <- "--install=no" +## cflags[pnames_using_install_fake] <- "--install=fake" +cflags[pnames_using_install_fake] <- + sprintf(if((getRversion() >= "4.2.0") && + (as.integer(R.version[["svn rev"]]) >= 80722)) + "--install='check+fake:%s/%s_i.out' %s" + else + "--install='check:%s/%s_i.out' %s", + wrkdir, pnames_using_install_fake, + "--no-examples --no-vignettes --no-tests") +## +pnames <- intersect(pnames_using_install_full, names(check_args_db)) +cflags[pnames] <- sprintf("--install='check:%s/%s_i.out' %s", + wrkdir, pnames, check_args_db[pnames]) +pnames <- setdiff(pnames_using_install_full, names(check_args_db)) +cflags[pnames] <- sprintf("--install='check:%s/%s_i.out'", + wrkdir, pnames) +## Now add install and check flags to available db. +available <- cbind(available, Iflags = iflags, Cflags = cflags) + +## Should already have been created by the check-R-ng shell code. +if(!utils::file_test("-d", libdir)) dir.create(libdir) + +## For testing purposes: +## pnames <- +## c(head(pnames_using_install_full, 50), +## pnames_using_install_fake, +## pnames_using_install_no) +pnames <- + c(pnames_using_install_full, + pnames_using_install_fake, + pnames_using_install_no) + +## +## Some packages cannot be checked using the current timeouts (e.g., as +## of 2019-03 maGUI takes very long to perform the R code analysis, +## which cannot be disabled selectively). +## Hence, drop these ... +## There should perhaps be a way of doing this programmatically from the +## stoplists ... +pnames_to_be_dropped <- c("maGUI") +pnames <- setdiff(pnames, pnames_to_be_dropped) +## + +timings <- + install_packages_with_timings(setdiff(pnames, + pnames_using_install_no), + available, + libdir, + Ncpus_i) +writeLines(timings, "timings_i.tab") + +## Some packages fail when using SNOW to create socket clusters +## simultaneously, with +## In socketConnection(port = port, server = TRUE, blocking = TRUE, : +## port 10187 cannot be opened +## These must be checked serially (or without run time tests). +## Others (e.g., gpuR) need enough system resources to be available when +## checking. 
+pnames_to_be_checked_serially <- + c("MSToolkit", "MSwM", "gdsfmt", "geneSignatureFinder", "gpuR", + "simFrame", "snowFT", "AFM", "AIG") + +## Do not allow packages to modify their system files when checking. +## Ideally, this is achieved via a read-only bind (re)mount of libdir, +## which can be achieved in user space via bindfs, or in kernel space +## via dedicated '/etc/fstab' non-superuser mount point entries. +## (E.g., +## +## for more information on bind mounts.) +## The user space variant adds a noticeable overhead: in 2018-01, about +## 30 minutes for check runs taking about 6.5 hours. +## Hence, do the kernel space variant if possible (as inferred by an +## entry for libdir in '/etc/fstab'). +## For the user space variant, '--no-allow-other' seems to suffice, and +## avoids the need for enabling 'user_allow_other' in '/etc/fuse.conf'. +## However, it apparently has problems when (simultaneously) checking +## Rcmdr* packages, giving "too many open files" errors when using the +## default maximum number for open file descriptors of 1024: this can be +## fixed via ulimit -n 2048 in check-R-ng. + +bind_mount_in_user_space <- + ! any(startsWith(readLines("/etc/fstab", warn = FALSE), libdir)) +if(bind_mount_in_user_space) { + system2("bindfs", + c("-r", "--no-allow-other", + shQuote(libdir), shQuote(libdir))) +} else { + system2("mount", shQuote(libdir)) +} + +## +## (We should really look at the return values of these calls.) +## + +## Older variants explicitly removed write mode bits for files in libdir +## while checking: also possible, but a bit too much, given that using a +## umask of 222 seems "strange", and *copying* from the libdir, e.g., +## using file.copy(), will by default copy the modes. +## +## system2("chmod", c("-R", "a-w", shQuote(libdir))) +## ## +## ## See above for '--install=fake' woes and how we currently work +## ## around these. +## ## But allow some access to libdir for packages using --install=fake. +## ## system2("chmod", c("u+w", shQuote(libdir))) +## ## for(p in pnames_using_install_fake) +## ## system2("chmod", c("-R", "u+w", shQuote(file.path(libdir, p)))) +## ## +## + +timings <- + check_packages_with_timings(setdiff(pnames, + pnames_to_be_checked_serially), + available, libdir, Ncpus_c, + check_packages_via_parallel_make) +if(length(pnames_to_be_checked_serially)) { + timings <- + c(timings, + check_packages_with_timings(intersect(pnames, + pnames_to_be_checked_serially), + available, libdir, 1, + check_packages_via_parallel_make)) + +} +writeLines(timings, "timings_c.tab") + +if(bind_mount_in_user_space) { + system2("fusermount", c("-u", shQuote(libdir))) +} else { + system2("umount", shQuote(libdir)) +} + +## +## (We should really look at the return values of these calls.) +## + +## Older variants case: +## +## ## Re-enable write permissions. +## system2("chmod", c("-R", "u+w", shQuote(libdir))) +## + +## Copy the package DESCRIPTION metadata over to the directories with +## the check results. +dpaths <- file.path(sprintf("%s.Rcheck", pnames), "00package.dcf") +invisible(file.copy(file.path(pnames, "DESCRIPTION"), dpaths)) +Sys.chmod(dpaths, "644") # Avoid rsync permission woes. + +## Summaries. + +## Source to get check_flavor_summary() and check_details_db(). +source(file.path(R_scripts_dir, "check.R")) + +## FIXME: use 'wrkdir' instead? +cwd <- getwd() + +## Check summary. +summary <- as.matrix(check_flavor_summary(check_dirs_root = cwd)) +## Change NA priority to empty. 
+summary[is.na(summary)] <- ""
+## Older versions also reported all packages with NOTEs as OK.
+## But why should we not want to see new NOTEs?
+write.csv(summary,
+          file = "summary.csv", quote = 4L, row.names = FALSE)
+
+## Check details.
+dir <- dirname(cwd)
+details <- check_details_db(dirname(dir), basename(dir), drop_ok = NA)
+write.csv(details[c("Package", "Version", "Check", "Status")],
+          file = "details.csv", quote = 3L, row.names = FALSE)
+## Also saveRDS() details without the flavor column and with the ok results
+## left in from drop_ok = NA (but keep ok stubs).
+details <-
+    details[(details$Check == "*") |
+            is.na(match(details$Status,
+                        c("OK", "NONE", "SKIPPED"))), ]
+details$Flavor <- NULL
+saveRDS(details, "details.rds", version = 2)
+
+## Check timings.
+timings <- merge(read.table(file.path(cwd, "timings_i.tab")),
+                 read.table(file.path(cwd, "timings_c.tab")),
+                 by = 1L, all = TRUE)
+names(timings) <- c("Package", "T_install", "T_check")
+timings$"T_total" <-
+    rowSums(timings[, c("T_install", "T_check")], na.rm = TRUE)
+write.csv(timings,
+          file = "timings.csv", quote = FALSE, row.names = FALSE)
+
diff --git a/Documents/CRAN_scripts/exploration_of_scripts.md b/Documents/CRAN_scripts/exploration_of_scripts.md
new file mode 100644
index 0000000..f53de37
--- /dev/null
+++ b/Documents/CRAN_scripts/exploration_of_scripts.md
@@ -0,0 +1,125 @@
+# Context
+
+In August 2022, we received links pointing to the resources run by the CRAN team to (1) check incoming tar.gz packages and (2) regularly check all packages already on CRAN.
+The answers are collected here: https://github.com/RConsortium/r-repositories-wg/blob/main/Documents/Proposal%20to%20CRAN.md
+
+In this issue, we can discuss the tests of the scripts received:
+
+- What do we learn?
+- What is needed to make them work?
+- How usable are they on different OS flavours?
+
+# Scripts
+
+Let's start with the incoming checks, as they are the first step a package has to pass on its way to CRAN.
+Then, we may explore the regular checks.
+
+## Incoming checks
+
+- Incoming checks are run each time a new package is submitted to CRAN.
+
+=> It seems that each CRAN team member runs a different set of tests, depending on their OS.
+
+Scripts are:
+
+- An R script that prepares the system and runs the `R CMD check` command: https://svn.r-project.org/R-dev-web/trunk/CRAN/QA/Kurt/lib/R/Scripts/check_CRAN_incoming.R
+- It is complemented by a set of env. variables: https://svn.r-project.org/R-dev-web/trunk/CRAN/QA/Kurt/.R/check.Renviron
+
+=> It seems that they do not use the `--as-cran` flag to run the check.
+
+### :heavy_check_mark: Tests on Ubuntu 22.04 LTS
+
+Extra steps needed on the local computer:
+```sh
+mkdir ~/tmp/
+mkdir ~/tmp/scratch
+mkdir ~/tmp/CRAN
+```
+
+- The function `getIncoming` is called in the R file but does not exist on my system.
+
+=> Comment out the `getIncoming` part (L173-182)
+=> Put the tar.gz of your package inside the `check_dir`, which by default is "~/tmp/CRAN"
+=> Run the R code
+=> The output looks like what we receive by email
+
+```r
+Depends:
+Package: fusen
+  Depends: R (>= 3.5.0)
+  Imports: attachment, cli, desc, devtools, glue, here (>= 1.0.0),
+      magrittr, parsermd (>= 0.1.0), roxygen2, stats, stringi,
+      tibble, tidyr, tools, usethis (>= 2.0.0), utils
+
+Timings:
+      utilisateur système écoulé
+fusen     107.585   9.573 143.396
+
+Results:
+Check status summary:
+                  ERROR
+  Source packages     1
+
+Check results summary:
+fusen ... ERROR
+* checking CRAN incoming feasibility ... NOTE
+* checking tests ... ERROR
+* checking PDF version of manual ... WARNING
+```
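+
+For reference, the whole local run can be summarised by the sketch below. The tarball name is a placeholder, and pointing `R CMD check` at the env. file via `R_CHECK_ENVIRON` is an assumption: the script may already pick up `~/.R/check.Renviron` by itself.
+
+```sh
+# Fetch the two files listed above (an svn client is required)
+svn export https://svn.r-project.org/R-dev-web/trunk/CRAN/QA/Kurt/lib/R/Scripts/check_CRAN_incoming.R
+svn export https://svn.r-project.org/R-dev-web/trunk/CRAN/QA/Kurt/.R/check.Renviron
+
+# Working directories expected by the script
+mkdir -p ~/tmp/scratch ~/tmp/CRAN
+
+# Put the tarball to be checked inside the check_dir (placeholder name)
+cp mypackage_1.0.0.tar.gz ~/tmp/CRAN/
+
+# Point R CMD check at the env. file and run the script
+export R_CHECK_ENVIRON="$PWD/check.Renviron"
+Rscript check_CRAN_incoming.R   # or source() it from an interactive session
+```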
+
+The full check directory is stored in the `check_dir`, which by default is "~/tmp/CRAN".
+![image](https://user-images.githubusercontent.com/21193866/191078227-1fcbd05f-a03d-4699-9dcb-02d7e0031863.png)
+
+:heavy_check_mark: It works on Ubuntu 20.04 LTS as is.
+
+## Regular checks
+
+- Regular checks seem to be run on a recurring basis.
+- The open question is when: whenever the devel version of R changes, when dependencies change, nightly, ...?
+- These checks seem to update the check results page of each package,
+  + e.g. https://cran.r-project.org/web/checks/check_results_gitlabr.html
+![image](https://user-images.githubusercontent.com/21193866/191074944-9a6a54b6-c1fa-44f6-910c-b55be3004c1b.png)
+
+Scripts are:
+
+- sh script: https://svn.r-project.org/R-dev-web/trunk/CRAN/QA/Kurt/bin/check-R-ng
+
+=> This builds the R-devel version
+
+- It then runs an R script, `check_CRAN_regular.R` (a copy is included in this folder),
+- which itself relies on the env. variables defined in `check.Renviron` (also copied in this folder).
+
+=> This builds all the R base packages
+
+### Tests on Ubuntu 22.04 LTS
+
+Extra steps needed on the local computer:
+
+```sh
+mkdir ~/tmp/
+mkdir ~/tmp/R.check
+```
+
+- Define these two directories inside the file "check-R-ng":
+
+- R scripts directory:
+```
+R_scripts_dir=~/lib/R/Scripts
+```
+
+- Shell scripts directory:
+```
+sh_scripts_dir=~/lib/bash
+```
+
+- Error still to be explored:
+```
+creating NEWS.2.pdf
+/bin/bash: line 1: html: command not found
+make: [Makefile:120: R-FAQ.epub] Error 127 (ignored)
+/usr/bin/sed: can't read R-FAQ.epub.tmp: No such file or directory
+make: *** [Makefile:121: R-FAQ.epub] Error 2
+Fatal error: cannot open file '/home/srochett/lib/R/Scripts/check_CRAN_regular.R': No such file or directory
+```
+
+=> Needs to be tested inside a Docker container
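+
+As a starting point for that Docker test, here is a minimal sketch. It only assumes a plain `ubuntu:22.04` image with `~/tmp/R.check` bind-mounted into the container; the list of build dependencies for compiling R-devel and its manuals is an educated guess and will likely need to be completed, and the CRAN files still have to be copied to the paths configured in `check-R-ng`.
+
+```sh
+# Sketch only: throwaway Ubuntu 22.04 container for the regular checks.
+# Image, paths and package list are assumptions, not a verified recipe.
+docker run --rm -it -v "$HOME/tmp/R.check":/root/tmp/R.check ubuntu:22.04 bash
+
+# Inside the container: a (probably incomplete) set of build dependencies
+# for compiling R-devel and building its manuals.
+apt-get update
+apt-get install -y subversion rsync build-essential gfortran \
+  texinfo texlive texlive-fonts-extra pandoc \
+  libreadline-dev libx11-dev libxt-dev libcurl4-openssl-dev libpcre2-dev \
+  libbz2-dev liblzma-dev zlib1g-dev libicu-dev
+
+# Copy check_CRAN_regular.R to R_scripts_dir (~/lib/R/Scripts), any shell
+# helpers to sh_scripts_dir (~/lib/bash) and check.Renviron to ~/.R/,
+# then run check-R-ng.
+```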