Skip to content

Commit

Permalink
merged PRs also show up now
Browse files Browse the repository at this point in the history
  • Loading branch information
b-rodrigues committed Feb 24, 2024
1 parent 3c31d2f commit 4305718
Show file tree
Hide file tree
Showing 2 changed files with 74 additions and 21 deletions.
76 changes: 63 additions & 13 deletions _targets.R
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,36 @@ safe_packageRank <- function(...){
)
}

get_prs <- function(state){
  #' Fetch rPackages-related PRs from NixOS/nixpkgs via the GitHub CLI.
  #'
  #' @param state PR state passed to `gh pr list --state=` ("open", "merged", ...).
  #' @return Path to the JSON file written by `gh`, for use with
  #'   `tar_target(..., format = "file")`.

  output_path <- paste0(state, "_prs.json")

  # Run the command; capture the exit status instead of discarding it
  status <- system(paste0(
    "gh pr list --state=", state,
    " --search=rPackages -R NixOS/nixpkgs --json title,updatedAt,url > ",
    output_path
  ))

  # Fail loudly if gh errored (not installed, not authenticated, rate-limited):
  # otherwise {targets} would happily track a missing, empty or stale file.
  if (status != 0L) {
    stop("`gh pr list` failed (exit status ", status,
         ") for state '", state, "'", call. = FALSE)
  }

  # Return path for targets
  output_path
}

clean_prs <- function(prs_raw, state){
  # Tidy a raw `gh pr list` data.frame (columns: title, updatedAt, url)
  # into the shape used by the PR table: state, packages, PR_date, PR.
  #
  # - `state` is recorded as a column so open and merged PRs can be
  #   combined downstream.
  # - The package name is whatever sits between "rPackages." (case-insensitive
  #   leading r/R on "packages") and the first ":" in the PR title.

  # Strip everything up to and including the "rPackages." prefix,
  # and tag each row with the PR state.
  stripped <- transform(
    prs_raw,
    title = gsub("^.*r(p|P)ackages\\.", "", title),
    state = state
  )

  # Derive the display columns: bare package name, date, and an HTML link.
  augmented <- transform(
    stripped,
    packages = gsub(":.*$", "", title),
    PR_date = updatedAt,
    PR = paste0('<a href="', url, '">', url, '</a>')
  )

  # Drop the intermediate columns, keeping state, packages, PR_date, PR.
  subset(augmented, select = -c(title, url, updatedAt))
}

list(
tar_target(
Expand Down Expand Up @@ -81,26 +111,46 @@ list(
safe_packageRank(packages = unique_packages)
),

tar_target(
open_prs_file,
get_prs("open"),
format = "file"
),

tar_target(
merged_prs_file,
get_prs("merged"),
format = "file"
),

tar_target(
open_prs_raw,
fromJSON("open_prs.json") |>
fromJSON(open_prs_file) |>
subset(subset = grepl("r(p|P)ackages", title))
),

tar_target(
merged_prs_raw,
fromJSON(merged_prs_file) |>
subset(subset = grepl("r(p|P)ackages", title))
),

tar_target(
open_prs,
transform(
open_prs_raw,
title = gsub("^r(p|P)ackages\\.", "", title)
) |>
transform(
packages = gsub(":.*$", "", title),
PR_date = updatedAt,
PR = paste0('<a href="', url, '">', url, '</a>')
) |>
subset(
select = -c(title, url, updatedAt)
)
clean_prs(open_prs_raw, "open")
),

tar_target(
merged_prs,
clean_prs(merged_prs_raw, "merged")
),

tar_target(
prs_df,
rbind(open_prs, merged_prs) |>
subset(subset = PR_date > latest_eval_date,
select = c("packages", "PR", "PR_date", "state")
)
),

tar_render(
Expand Down
19 changes: 11 additions & 8 deletions r-updates-fails.Rmd
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ format: html
targets::tar_load(last_jobset_url)
targets::tar_load(results_table_with_rank)
targets::tar_load(open_prs)
targets::tar_load(prs_df)
# if everything fails, stop here
stopifnot("Table has error’d" = !is.null(results_table_with_rank))
Expand All @@ -20,20 +20,23 @@ results_df <- subset(results_table_with_rank$package.data,
```

The table below can be found at `r last_jobset_url`, but here it includes the rank of the package
as computed by the `{packageRank}` package, as well as a link to the PR to fix the build
if it has been opened. For now, a package that doesn't build because one of its dependencies
The table below can be found at `r last_jobset_url`, but here it includes the
rank of the package as computed by the `{packageRank}` package, as well as a
link to the PR to fix the build if it has been opened or if it's been merged
already. For now, a package that doesn't build because one of its dependencies
is broken is not linked to its dependency's open PR.

The action generating the website runs each day at midnight. Do check the original on Hydra,
because Bioconductor packages are not listed, nor packages for which somehow {packageRank}
can't find a ranking.
The action generating the website runs each day at midnight. Do check the
original on Hydra, because Bioconductor packages are not listed, nor packages
for which somehow {packageRank} can't find a ranking. If the pull request
doesn't follow the naming convention "rPackages.packagename: blah blah", it
likely won't show up here.

```{r, echo = FALSE}
targets::tar_load(failing_jobs)
merge(failing_jobs, results_df) |>
merge(open_prs, all.x = TRUE) |>
merge(prs_df, all.x = TRUE) |>
subset(select = -`X.`) |>
reactable::reactable(columns = list(build = reactable::colDef(html = TRUE),
PR = reactable::colDef(html = TRUE)),
Expand Down

0 comments on commit 4305718

Please sign in to comment.