diff --git a/.Rhistory b/.Rhistory
index 1de486c3..099ba737 100644
--- a/.Rhistory
+++ b/.Rhistory
@@ -1,118 +1,3 @@
-URL <- paste(
-"https://github.com/",
-co_notag,
-"/pkgs/container/",basename(co_notag),
-"/versions?filters%5Bversion_type%5D=tagged"
-)
-x <- rvest::read_html(URL)
-URL
-##### Only works if you have permissions to the repo #####
-# get_ghcr_tags <- function(container) {
-# url <- paste0("https://ghcr.io/v2/", container, "/tags/list")
-# response <- jsonlite::fromJSON(url)
-# tags <- response$tags
-# return(tags)
-# }
-# get_ghcr_tags("bioconductor/bioconductor_docker")
-#
-URL <- paste0(
-"https://github.com/",
-co_notag,
-"/pkgs/container/",basename(co_notag),
-"/versions?filters%5Bversion_type%5D=tagged"
-)
-x <- rvest::read_html(URL)
-x
-### get all nodes of class "Label"
-tags <- x |> rvest::html_nodes(".Label") |> rvest::html_text()
-tags
-x <- tryCatch({
-rvest::read_html(URL)
-}, error=function(e){
-stopper("Unable to find public container:",co)}
-)
-x
-splt <- strsplit(co,":")[[1]]
-splt
-source("~/Desktop/rworkflows/R/check_tags.R")
-check_tags(tags = tags,
-splt = splt,
-verbose = verbose)
-verbose=T
-check_tags(tags = tags,
-splt = splt,
-verbose = verbose)
-# co <- "ghcr.io/bioconductor/bioconductor_docker:devel"
-co_notag <- gsub("ghcr.io/","",strsplit(co,":")[[1]][1])
-##### Only works if you have permissions to the repo #####
-# get_ghcr_tags <- function(container) {
-# url <- paste0("https://ghcr.io/v2/", container, "/tags/list")
-# response <- jsonlite::fromJSON(url)
-# tags <- response$tags
-# return(tags)
-# }
-# tags <- get_ghcr_tags("bioconductor/bioconductor_docker")
-#
-URL <- paste0(
-"https://github.com/",
-co_notag,
-"/pkgs/container/",basename(co_notag),
-"/versions?filters%5Bversion_type%5D=tagged"
-)
-x <- tryCatch({
-rvest::read_html(URL)
-}, error=function(e){
-stopper("Unable to find public container:",co)}
-)
-### get all nodes of class "Label"
-tags <- x |> rvest::html_nodes(".Label") |> rvest::html_text()
-if(length(tags)==0){
-stopper("Unable to find public container:",co)
-}
-strsplit(co,":")[[1]]
-strsplit(co,":")[[2]]
-check_tags(tags = tags,
-splt = strsplit(co,":")[[1]],
-verbose = verbose)
-tags
-check_tags(tags = tags,
-splt = strsplit(co,":")[[1]][1],
-verbose = verbose)
-strsplit(co,":")[[1]][1]
-check_tags(tags = tags,
-splt = c("ew","wefwe"),
-verbose = verbose)
-tags
-co
-for(co in cont){
-# co <- "ghcr.io/bioconductor/bioconductor_docker:devel"
-co_notag <- gsub("ghcr.io/","",strsplit(co,":")[[1]][1])
-##### Only works if you have permissions to the repo #####
-# get_ghcr_tags <- function(container) {
-# url <- paste0("https://ghcr.io/v2/", container, "/tags/list")
-# response <- jsonlite::fromJSON(url)
-# tags <- response$tags
-# return(tags)
-# }
-# tags <- get_ghcr_tags("bioconductor/bioconductor_docker")
-#
-URL <- paste0(
-"https://github.com/",
-co_notag,
-"/pkgs/container/",basename(co_notag),
-"/versions?filters%5Bversion_type%5D=tagged"
-)
-x <- tryCatch({
-rvest::read_html(URL)
-}, error=function(e){
-stopper("Unable to find public container:",co)}
-)
-### get all nodes of class "Label"
-tags <- x |> rvest::html_nodes(".Label") |> rvest::html_text()
-if(length(tags)==0){
-stopper("Unable to find tags for container:",co)
-} else {
-check_tags(tags = tags, splt = strsplit(co,":")[[1]][1], verbose = verbose)
 }
@@ -510,3 +395,118 @@ path <- use_dockerfile(save_dir=tempdir(), show=TRUE)
 library(rworkflows)
 path <- use_dockerfile(save_dir=tempdir(), show=TRUE)
 library(rworkflows)
+chattr::chattr_app()
+install.packages("chattr") +chattr::chattr_app() +remotes::install_github("mlverse/chattr") +chattr::chattr_app() +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +node_size = "total_downloads", +height="100%", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +node_size = "total_downloads", +height="100%", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +node_size = "total_downloads", +height="100%", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +install.packages("githubinstall") +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +node_size = "total_downloads", +height="100%", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +node_size = "total_downloads", +height="100%", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +check_gh_url <- function(URL){ +if(sum(grepl("https://github.com",URL))==0) { +return(NULL) +} else { +return(URL) +} +} +devtools::check_man() +library(rworkflows) +get_github_url_desc(desc_file = NULL) +library(rworkflows) +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +node_size = "total_downloads", +height="100%", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +node_size = "total_downloads", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +echodeps::visnet_save(res$save_path) +library(rworkflows) +method_seed = "github", +res <- echodeps::dep_graph(pkg = "rworkflows", +method_seed = "github", +exclude = c("neurogenomics_rworkflows", +"neurogenomics_r_workflows"), +#node_size = "total_downloads", +reverse = TRUE, +save_path = here::here("reports","rworkflows_depgraph.html")) +echodeps::visnet_save(res$save_path) +saveRDS(res, here::here("reports","dep_graph_res.rds")) +res$report +res$pkg +res$graph +data.frame(res$graph) +names(data.frame(res$graph)) +unique(names(data.frame(res$graph))) +sort(names(data.frame(res$graph))) +echodeps:::report_summary +res$report +r_repos_res <- echogithub::r_repos( +which = echogithub::r_repos_opts(exclude="local"), +save_path = here::here("reports","r_repos_upset.pdf"), +width = 9) +r_repos_res$repo_stats +formals(rworkflows::use_workflow()) +formals(rworkflows::use_workflow +) +length(formals(rworkflows::use_workflow)) +devtools::check_man() +devtools::check_man() +library(rworkflows) +?use_workflow +devtools::check_man() +devtools::check_man() +devtools::check_man() +library(rworkflows) +library(rworkflows) +devtools::check_man() +?use_workflow +library(rworkflows) +library(rworkflows) diff --git a/.github/workflows/rworkflows_dev.yml 
diff --git a/.github/workflows/rworkflows_dev.yml b/.github/workflows/rworkflows_dev.yml
index 073dd617..bfb4dcc3 100644
--- a/.github/workflows/rworkflows_dev.yml
+++ b/.github/workflows/rworkflows_dev.yml
@@ -54,3 +54,4 @@ jobs:
           DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
           runner_os: ${{ runner.os }}
           cache_version: cache-v1
+          free_diskspace: ${{ true }}
diff --git a/.github/workflows/rworkflows_static.yml b/.github/workflows/rworkflows_static.yml
index 39328cac..beb86a9d 100644
--- a/.github/workflows/rworkflows_static.yml
+++ b/.github/workflows/rworkflows_static.yml
@@ -19,17 +19,18 @@ env:
   run_vignettes: ${{ true }}
   has_testthat: ${{ true }}
   run_covr: ${{ true }}
-  run_pkgdown: ${{ false }}
+  run_pkgdown: ${{ true }}
   has_runit: ${{ false }}
   has_latex: ${{ false }}
   tinytex_installer: TinyTeX-1
-  tinytex_version: ''
   pandoc_version: '2.19'
   run_docker: ${{ false }}
+  docker_registry: ghcr.io
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
   cache_version: cache-v1
-  enable_act: ${{ false }}
+  miniforge_variant: ${{ false }}
+  activate_environment: test
 jobs:
   rworkflows_static:
     permissions: write-all
@@ -43,7 +44,7 @@ jobs:
         - os: ubuntu-latest
           bioc: devel
           r: auto
-          cont: bioconductor/bioconductor_docker:devel
+          cont: ghcr.io/bioconductor/bioconductor_docker:devel
           rspm: https://packagemanager.rstudio.com/cran/__linux__/latest/release
         - os: macOS-latest
           bioc: release
diff --git a/NEWS.md b/NEWS.md
index 709095e9..92bca66b 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -32,6 +32,9 @@
   - Improve logic to catch more GH URLs.
 * *vignettes/depgraph.Rmd*
   - Update plots with new data and resave PNG.
+* New arg `free_diskspace`
+  - *action.yml*
+  - `use_workflow`
 
 ## Bug fixes
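The NEWS entry above introduces free_diskspace both as an action.yml input and as a use_workflow() argument; the rworkflows_dev.yml change shows the caller side. A hedged sketch of regenerating a workflow with the option enabled, assuming the argument is forwarded verbatim to the generated YAML:

library(rworkflows)
## Rebuild the workflow file with extra runner disk space freed.
## The `free_diskspace` argument name comes from the NEWS entry above;
## the other arguments mirror the tests later in this diff.
path <- use_workflow(free_diskspace = TRUE,
                     force_new = TRUE,
                     save_dir = tempdir())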
diff --git a/action.yml b/action.yml
index 2a52c8b3..3a42b4d3 100644
--- a/action.yml
+++ b/action.yml
@@ -154,6 +154,11 @@ inputs:
       https://docs.conda.io/projects/conda/en/latest/user-guide/configuration/
       for more information.
     default: ''
+  free_diskspace:
+    description: >
+      Free up additional disk space by deleting non-essential software.
+    default: false
+
 runs:
   using: 'composite'
   steps:
@@ -167,12 +172,15 @@ runs:
         echo "NOT_CRAN=${{ !inputs.as_cran }}" >> $GITHUB_ENV
       shell: bash {0}
 
-    - name: Collect Workflow Telemetry 🔭
+    - name: 🔭 Collect Workflow Telemetry
      uses: runforesight/workflow-telemetry-action@v1
      continue-on-error: true
+
+    - name: 💾 Free Disk Space
+      if: inputs.free_diskspace == 'true'
+      uses: jlumbroso/free-disk-space@v1
 
-    ## 🐍 Setup Miniconda
-    - name: Setup Miniconda
+    - name: 🐍 Setup Miniconda
      if: inputs.miniforge_variant != 'false'
      uses: conda-incubator/setup-miniconda@v2
      with:
diff --git a/tests/testthat/test-construct_conda_yml.R b/tests/testthat/test-construct_conda_yml.R
index 0390eaa6..0eec06c3 100644
--- a/tests/testthat/test-construct_conda_yml.R
+++ b/tests/testthat/test-construct_conda_yml.R
@@ -50,6 +50,12 @@ test_that("construct_conda_yml works", {
 
   testthat::expect_equal(basename(dirname(dirname(out))),envname)
 
+  ### Test conda_export
+  exported_yml <- conda_export(envname)
+  testthat::expect_true(file.exists(exported_yml))
+  testthat::expect_gte(length(readLines(exported_yml)),20)
+
+
   #### From requirements.txt ####
   ## Currently fails due to error:
   ## Error: Error creating conda environment [exit code 1]
diff --git a/tests/testthat/test-construct_runners.R b/tests/testthat/test-construct_runners.R
index c535c652..9e771f38 100644
--- a/tests/testthat/test-construct_runners.R
+++ b/tests/testthat/test-construct_runners.R
@@ -1,11 +1,13 @@
 test_that("construct_runners works", {
 
+  #### Set up tests ####
   run_tests <- function(runners){
     testthat::expect_length(runners,3)
     for (r in runners){
       testthat::expect_true(all(c("os","bioc","r") %in% names(r)))
     }
   }
+  #### Defaults ####
   runners <- construct_runners()
   run_tests(runners = runners)
 
@@ -48,4 +50,15 @@ test_that("construct_runners works", {
                            "typooo"="devel"))
   )
 
+  #### When python versions passed ####
+  python_version <- "3.9"
+  runners <- construct_runners(python_version = python_version)
+  run_tests(runners = runners)
+  testthat::expect_true(
+    all(sapply(runners, function(x)x$r=="auto"))
+  )
+  testthat::expect_true(
+    all(sapply(runners, function(x)x$`python-version`==python_version))
+  )
+
 })
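The new tests above exercise conda_export() and the python_version argument of construct_runners(). A brief usage sketch mirroring those tests; the version and dependency names are illustrative only:

library(rworkflows)
## Attach a Python version to every runner in the test matrix.
runners <- construct_runners(python_version = "3.9")
## Build a conda environment spec without creating the environment (preview = TRUE).
conda_yml <- construct_conda_yml(dependencies = c("python>=3.9", "anndata"),
                                 preview = TRUE,
                                 save_path = file.path(tempdir(), "conda.yml"))
## conda_export(<existing env name>) writes an existing conda environment back
## to YAML, as checked in the conda test above.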
diff --git a/tests/testthat/test-use_workflow.R b/tests/testthat/test-use_workflow.R
index 4e75552c..da39a797 100644
--- a/tests/testthat/test-use_workflow.R
+++ b/tests/testthat/test-use_workflow.R
@@ -38,7 +38,38 @@ test_that("use_workflow works", {
                        force_new = TRUE,
                        tinytex_installer = "TinyTeX",
                        save_dir = tempdir())
+  testthat::expect_equal(yml$on,yml3$on)
+  testthat::expect_equal(length(yml$jobs[[1]]$steps),1)
+  testthat::expect_gte(length(yml3$jobs[[1]]$steps),20)
+  #### Modify conda args ####
+  environment_file <- construct_conda_yml(dependencies = c("python>=3.9","anndata"),
+                                          preview = TRUE,
+                                          save_path = file.path(tempdir(),"conda.yml"))
+  miniforge_variant <- "Mambaforge"
+  yml4 <- use_workflow(return_path = FALSE,
+                       force_new = TRUE,
+                       miniforge_variant = miniforge_variant,
+                       environment_file = environment_file,
+                       save_dir = tempdir())
+  testthat::expect_equal(yml$on,yml4$on)
+  testthat::expect_null(yml$jobs$rworkflows$steps[[1]]$with$miniforge_variant)
+  testthat::expect_equal(yml4$jobs$rworkflows$steps[[1]]$with$miniforge_variant,
+                         miniforge_variant)
+  testthat::expect_equal(yml4$jobs$rworkflows$steps[[1]]$with$environment_file,
+                         environment_file)
+
+  miniforge_variant <- TRUE
+  yml5 <- use_workflow(return_path = FALSE,
+                       force_new = TRUE,
+                       miniforge_variant = miniforge_variant,
+                       environment_file = environment_file,
+                       save_dir = tempdir())
+  testthat::expect_equal(yml$on,yml5$on)
+  testthat::expect_equal(yml5$jobs$rworkflows$steps[[1]]$with$miniforge_variant,
+                         "")
+  testthat::expect_equal(yml5$jobs$rworkflows$steps[[1]]$with$environment_file,
+                         environment_file)
 
   #### Make table out of arguments ####
   # defaults <- eval(formals(rworkflows::use_workflow))
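For completeness, a sketch of the conda-enabled setup the yml4/yml5 tests above cover; argument values are taken from those tests and are not prescriptive:

library(rworkflows)
## Generate a conda environment file, then point the generated workflow at it.
environment_file <- construct_conda_yml(dependencies = c("python>=3.9", "anndata"),
                                        preview = TRUE,
                                        save_path = file.path(tempdir(), "conda.yml"))
yml <- use_workflow(miniforge_variant = "Mambaforge", # yml5 passes TRUE for the default variant
                    environment_file = environment_file,
                    return_path = FALSE,
                    force_new = TRUE,
                    save_dir = tempdir())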