Add free_diskspace. increase code coverage
bschilder committed Oct 27, 2023
1 parent 530dcc1 commit 9193051
Showing 8 changed files with 185 additions and 122 deletions.
230 changes: 115 additions & 115 deletions .Rhistory
@@ -1,118 +1,3 @@
URL <- paste(
"https://github.com/",
co_notag,
"/pkgs/container/",basename(co_notag),
"/versions?filters%5Bversion_type%5D=tagged"
)
x <- rvest::read_html(URL)
URL
##### Only works if you have permissions to the repo #####
# get_ghcr_tags <- function(container) {
# url <- paste0("https://ghcr.io/v2/", container, "/tags/list")
# response <- jsonlite::fromJSON(url)
# tags <- response$tags
# return(tags)
# }
# get_ghcr_tags("bioconductor/bioconductor_docker")
#
URL <- paste0(
"https://github.com/",
co_notag,
"/pkgs/container/",basename(co_notag),
"/versions?filters%5Bversion_type%5D=tagged"
)
x <- rvest::read_html(URL)
x
### get all nodes of class "Label"
tags <- x |> rvest::html_nodes(".Label") |> rvest::html_text()
tags
x <- tryCatch({
rvest::read_html(URL)
}, error=function(e){
stopper("Unable to find public container:",co)}
)
x
splt <- strsplit(co,":")[[1]]
splt
source("~/Desktop/rworkflows/R/check_tags.R")
check_tags(tags = tags,
splt = splt,
verbose = verbose)
verbose=T
check_tags(tags = tags,
splt = splt,
verbose = verbose)
# co <- "ghcr.io/bioconductor/bioconductor_docker:devel"
co_notag <- gsub("ghcr.io/","",strsplit(co,":")[[1]][1])
##### Only works if you have permissions to the repo #####
# get_ghcr_tags <- function(container) {
# url <- paste0("https://ghcr.io/v2/", container, "/tags/list")
# response <- jsonlite::fromJSON(url)
# tags <- response$tags
# return(tags)
# }
# tags <- get_ghcr_tags("bioconductor/bioconductor_docker")
#
URL <- paste0(
"https://github.com/",
co_notag,
"/pkgs/container/",basename(co_notag),
"/versions?filters%5Bversion_type%5D=tagged"
)
x <- tryCatch({
rvest::read_html(URL)
}, error=function(e){
stopper("Unable to find public container:",co)}
)
### get all nodes of class "Label"
tags <- x |> rvest::html_nodes(".Label") |> rvest::html_text()
if(length(tags)==0){
stopper("Unable to find public container:",co)
}
strsplit(co,":")[[1]]
strsplit(co,":")[[2]]
check_tags(tags = tags,
splt = strsplit(co,":")[[1]],
verbose = verbose)
tags
check_tags(tags = tags,
splt = strsplit(co,":")[[1]][1],
verbose = verbose)
strsplit(co,":")[[1]][1]
check_tags(tags = tags,
splt = c("ew","wefwe"),
verbose = verbose)
tags
co
for(co in cont){
# co <- "ghcr.io/bioconductor/bioconductor_docker:devel"
co_notag <- gsub("ghcr.io/","",strsplit(co,":")[[1]][1])
##### Only works if you have permissions to the repo #####
# get_ghcr_tags <- function(container) {
# url <- paste0("https://ghcr.io/v2/", container, "/tags/list")
# response <- jsonlite::fromJSON(url)
# tags <- response$tags
# return(tags)
# }
# tags <- get_ghcr_tags("bioconductor/bioconductor_docker")
#
URL <- paste0(
"https://github.com/",
co_notag,
"/pkgs/container/",basename(co_notag),
"/versions?filters%5Bversion_type%5D=tagged"
)
x <- tryCatch({
rvest::read_html(URL)
}, error=function(e){
stopper("Unable to find public container:",co)}
)
### get all nodes of class "Label"
tags <- x |> rvest::html_nodes(".Label") |> rvest::html_text()
if(length(tags)==0){
stopper("Unable to find tags for container:",co)
} else {
check_tags(tags = tags,
splt = strsplit(co,":")[[1]][1],
verbose = verbose)
}
@@ -510,3 +395,118 @@ path <- use_dockerfile(save_dir=tempdir(), show=TRUE)
library(rworkflows)
path <- use_dockerfile(save_dir=tempdir(), show=TRUE)
library(rworkflows)
chattr::chattr_app()
install.packages("chattr")
chattr::chattr_app()
remotes::install_github("mlverse/chattr")
chattr::chattr_app()
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
node_size = "total_downloads",
height="100%",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
node_size = "total_downloads",
height="100%",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
node_size = "total_downloads",
height="100%",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
install.packages("githubinstall")
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
node_size = "total_downloads",
height="100%",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
node_size = "total_downloads",
height="100%",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
check_gh_url <- function(URL){
if(sum(grepl("https://github.com",URL))==0) {
return(NULL)
} else {
return(URL)
}
}
devtools::check_man()
library(rworkflows)
get_github_url_desc(desc_file = NULL)
library(rworkflows)
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
node_size = "total_downloads",
height="100%",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
node_size = "total_downloads",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
echodeps::visnet_save(res$save_path)
library(rworkflows)
method_seed = "github",
res <- echodeps::dep_graph(pkg = "rworkflows",
method_seed = "github",
exclude = c("neurogenomics_rworkflows",
"neurogenomics_r_workflows"),
#node_size = "total_downloads",
reverse = TRUE,
save_path = here::here("reports","rworkflows_depgraph.html"))
echodeps::visnet_save(res$save_path)
saveRDS(res, here::here("reports","dep_graph_res.rds"))
res$report
res$pkg
res$graph
data.frame(res$graph)
names(data.frame(res$graph))
unique(names(data.frame(res$graph)))
sort(names(data.frame(res$graph)))
echodeps:::report_summary
res$report
r_repos_res <- echogithub::r_repos(
which = echogithub::r_repos_opts(exclude="local"),
save_path = here::here("reports","r_repos_upset.pdf"),
width = 9)
r_repos_res$repo_stats
formals(rworkflows::use_workflow())
formals(rworkflows::use_workflow
)
length(formals(rworkflows::use_workflow))
devtools::check_man()
devtools::check_man()
library(rworkflows)
?use_workflow
devtools::check_man()
devtools::check_man()
devtools::check_man()
library(rworkflows)
library(rworkflows)
devtools::check_man()
?use_workflow
library(rworkflows)
library(rworkflows)
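
The removed .Rhistory lines above are interactive scratch work for looking up the tags of a public GHCR container by scraping its GitHub Packages page. A minimal, self-contained sketch of that pattern (using base stop() in place of the package-internal stopper(), and assuming the container is public and GitHub still renders tag badges with the "Label" CSS class):

library(rvest)

## List the tagged versions of a public GHCR container by scraping its
## GitHub Packages page.
get_public_ghcr_tags <- function(cont) {
  ## Drop the registry prefix and the ":tag" suffix to get "owner/image".
  co_notag <- gsub("ghcr.io/", "", strsplit(cont, ":")[[1]][1])
  URL <- paste0(
    "https://github.com/", co_notag,
    "/pkgs/container/", basename(co_notag),
    "/versions?filters%5Bversion_type%5D=tagged"
  )
  x <- tryCatch(
    rvest::read_html(URL),
    error = function(e) stop("Unable to find public container: ", cont)
  )
  ## Tag badges on the page carry the "Label" CSS class.
  tags <- x |> rvest::html_nodes(".Label") |> rvest::html_text()
  if (length(tags) == 0) stop("Unable to find tags for container: ", cont)
  tags
}

## Example (requires internet access):
## get_public_ghcr_tags("ghcr.io/bioconductor/bioconductor_docker:devel")
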
1 change: 1 addition & 0 deletions .github/workflows/rworkflows_dev.yml
@@ -54,3 +54,4 @@ jobs:
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
runner_os: ${{ runner.os }}
cache_version: cache-v1
free_diskspace: ${{ true }}
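
The dev workflow above now opts into the new input. According to the NEWS entry below, `free_diskspace` is also a new argument of `use_workflow()`, so a workflow file with this input enabled could be regenerated from R. A hedged sketch, with all other arguments left at their defaults:

## Regenerate the GitHub Actions workflow with disk cleanup enabled.
## The `free_diskspace` argument name is taken from the NEWS entry for use_workflow();
## writing to tempdir() here rather than to .github/workflows/.
path <- rworkflows::use_workflow(free_diskspace = TRUE,
                                 save_dir = tempdir())
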
9 changes: 5 additions & 4 deletions .github/workflows/rworkflows_static.yml
@@ -19,17 +19,18 @@ env:
run_vignettes: ${{ true }}
has_testthat: ${{ true }}
run_covr: ${{ true }}
run_pkgdown: ${{ false }}
run_pkgdown: ${{ true }}
has_runit: ${{ false }}
has_latex: ${{ false }}
tinytex_installer: TinyTeX-1
tinytex_version: ''
pandoc_version: '2.19'
run_docker: ${{ false }}
docker_registry: ghcr.io
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
cache_version: cache-v1
enable_act: ${{ false }}
miniforge_variant: ${{ false }}
activate_environment: test
jobs:
rworkflows_static:
permissions: write-all
@@ -43,7 +44,7 @@ jobs:
- os: ubuntu-latest
bioc: devel
r: auto
cont: bioconductor/bioconductor_docker:devel
cont: ghcr.io/bioconductor/bioconductor_docker:devel
rspm: https://packagemanager.rstudio.com/cran/__linux__/latest/release
- os: macOS-latest
bioc: release
3 changes: 3 additions & 0 deletions NEWS.md
@@ -32,6 +32,9 @@
- Improve logic to catch more GH URLs.
* *vignettes/depgraph.Rmd*
- Update plots with new data and resave PNG.
* New arg `free_diskspace`
- *action.yml*
- `use_workflow`

## Bug fixes

14 changes: 11 additions & 3 deletions action.yml
@@ -154,6 +154,11 @@ inputs:
https://docs.conda.io/projects/conda/en/latest/user-guide/configuration/
for more information.
default: ''
free_diskspace:
description: >
Free up additional disk space by deleting non-essential software.
default: false

runs:
using: 'composite'
steps:
@@ -167,12 +172,15 @@ runs:
echo "NOT_CRAN=${{ !inputs.as_cran }}" >> $GITHUB_ENV
shell: bash {0}

- name: Collect Workflow Telemetry 🔭
- name: 🔭 Collect Workflow Telemetry
uses: runforesight/workflow-telemetry-action@v1
continue-on-error: true

- name: 💾 Free Disk Space
if: inputs.free_diskspace == 'true'
uses: jlumbroso/free-disk-space@v1

## 🐍 Setup Miniconda
- name: Setup Miniconda
- name: 🐍 Setup Miniconda
if: inputs.miniforge_variant != 'false'
uses: conda-incubator/setup-miniconda@v2
with:
6 changes: 6 additions & 0 deletions tests/testthat/test-construct_conda_yml.R
@@ -50,6 +50,12 @@ test_that("construct_conda_yml works", {
testthat::expect_equal(basename(dirname(dirname(out))),envname)


### Test conda_export
exported_yml <- conda_export(envname)
testthat::expect_true(file.exists(exported_yml))
testthat::expect_gte(length(readLines(exported_yml)),20)


#### From requirements.txt ####
## Currently fails due to error:
## Error: Error creating conda environment [exit code 1]
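
The new assertions above exercise conda_export() after building an environment YAML with construct_conda_yml(). A rough usage sketch of that pairing outside of testthat, assuming conda_export() is exported (the test calls it unqualified), a conda installation is available, and an environment named "test" already exists (mirroring the `activate_environment: test` default in the static workflow):

library(rworkflows)

## Write a conda environment YAML from a dependency list
## (same call as in the use_workflow test further down).
yml_path <- construct_conda_yml(dependencies = c("python>=3.9", "anndata"),
                                preview = TRUE,
                                save_path = file.path(tempdir(), "conda.yml"))

## Export an existing conda environment back to a YAML file, as the new test does.
## "test" is an assumed, pre-existing environment name.
exported_yml <- conda_export("test")
readLines(exported_yml)
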
13 changes: 13 additions & 0 deletions tests/testthat/test-construct_runners.R
@@ -1,11 +1,13 @@
test_that("construct_runners works", {

#### Set up tests ####
run_tests <- function(runners){
testthat::expect_length(runners,3)
for (r in runners){
testthat::expect_true(all(c("os","bioc","r") %in% names(r)))
}
}
#### Defaults ####
runners <- construct_runners()
run_tests(runners = runners)

@@ -48,4 +50,15 @@ test_that("construct_runners works", {
"typooo"="devel"))
)

#### When python versions passed ####
python_version <- "3.9"
runners <- construct_runners(python_version = python_version)
run_tests(runners = runners)
testthat::expect_true(
all(sapply(runners, function(x)x$r=="auto"))
)
testthat::expect_true(
all(sapply(runners, function(x)x$`python-version`==python_version))
)

})
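
The added block checks that construct_runners() propagates a python_version to every runner while leaving r = "auto". A quick interactive check of the same behaviour, with the argument name taken from the test above:

library(rworkflows)

runners <- construct_runners(python_version = "3.9")
length(runners)                                  ## three runner configurations
sapply(runners, function(x) x$os)                ## the default platforms
sapply(runners, function(x) x$r)                 ## all "auto"
sapply(runners, function(x) x$`python-version`)  ## all "3.9"
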
31 changes: 31 additions & 0 deletions tests/testthat/test-use_workflow.R
@@ -38,7 +38,38 @@ test_that("use_workflow works", {
force_new = TRUE,
tinytex_installer = "TinyTeX",
save_dir = tempdir())
testthat::expect_equal(yml$on,yml3$on)
testthat::expect_equal(length(yml$jobs[[1]]$steps),1)
testthat::expect_gte(length(yml3$jobs[[1]]$steps),20)

#### Modify conda args ####
environment_file <- construct_conda_yml(dependencies = c("python>=3.9","anndata"),
preview = TRUE,
save_path = file.path(tempdir(),"conda.yml"))
miniforge_variant <- "Mambaforge"
yml4 <- use_workflow(return_path = FALSE,
force_new = TRUE,
miniforge_variant = miniforge_variant,
environment_file = environment_file,
save_dir = tempdir())
testthat::expect_equal(yml$on,yml4$on)
testthat::expect_null(yml$jobs$rworkflows$steps[[1]]$with$miniforge_variant)
testthat::expect_equal(yml4$jobs$rworkflows$steps[[1]]$with$miniforge_variant,
miniforge_variant)
testthat::expect_equal(yml4$jobs$rworkflows$steps[[1]]$with$environment_file,
environment_file)

miniforge_variant <- TRUE
yml5 <- use_workflow(return_path = FALSE,
force_new = TRUE,
miniforge_variant = miniforge_variant,
environment_file = environment_file,
save_dir = tempdir())
testthat::expect_equal(yml$on,yml5$on)
testthat::expect_equal(yml5$jobs$rworkflows$steps[[1]]$with$miniforge_variant,
"")
testthat::expect_equal(yml5$jobs$rworkflows$steps[[1]]$with$environment_file,
environment_file)

#### Make table out of arguments ####
# defaults <- eval(formals(rworkflows::use_workflow))
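
The tests above pin down how use_workflow() forwards conda settings into the generated workflow: a character miniforge_variant (e.g. "Mambaforge") is passed through verbatim, while miniforge_variant = TRUE is written out as an empty string so that setup-miniconda falls back to its default. An end-to-end sketch mirroring those tests (argument names and the inspected list structure are taken directly from the test code):

library(rworkflows)

## A conda environment file to hand to the workflow (same call as in the test).
environment_file <- construct_conda_yml(dependencies = c("python>=3.9", "anndata"),
                                        preview = TRUE,
                                        save_path = file.path(tempdir(), "conda.yml"))

## Generate a workflow that sets up Mambaforge with that environment.
yml <- use_workflow(miniforge_variant = "Mambaforge",
                    environment_file = environment_file,
                    force_new = TRUE,
                    return_path = FALSE,
                    save_dir = tempdir())

## The rworkflows action step now carries the conda inputs.
yml$jobs$rworkflows$steps[[1]]$with$miniforge_variant  ## "Mambaforge"
yml$jobs$rworkflows$steps[[1]]$with$environment_file   ## path to conda.yml
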
