
Commit

initial commit

SimonCoulombe committed Jan 11, 2022
1 parent 6b5a61d commit e99fdb1
Showing 15 changed files with 1,074 additions and 0 deletions.
1 change: 1 addition & 0 deletions .Rbuildignore
@@ -0,0 +1 @@
^\.github$
42 changes: 42 additions & 0 deletions .gitignore
@@ -0,0 +1,42 @@
# History files
.Rhistory
.Rapp.history

# Session Data files
.RData

# User-specific files
.Ruserdata

# Example code in package build process
*-Ex.R

# Output files from R CMD build
/*.tar.gz

# Output files from R CMD check
/*.Rcheck/

# RStudio files
.Rproj.user/

# produced vignettes
vignettes/*.html
vignettes/*.pdf

# OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3
.httr-oauth

# knitr and R markdown default cache directories
*_cache/
/cache/

# Temporary files created by R markdown
*.utf8.md
*.knit.md

# R Environment Variables
.Renviron


data/current_well.csv
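
The R scripts below read their database connection settings from environment variables via Sys.getenv(), and .Renviron is ignored here so those values stay out of the repository. A minimal sketch of such a .Renviron, with placeholder values (only the variable names BCGOV_DB, BCGOV_HOST, BCGOV_USR and BCGOV_PWD come from the scripts):

# .Renviron -- kept out of git; every value below is a placeholder
BCGOV_DB=wells
BCGOV_HOST=db.example.org
BCGOV_USR=wells_user
BCGOV_PWD=change-me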
79 changes: 79 additions & 0 deletions R/archive_new_wells.R
@@ -0,0 +1,79 @@
library(dplyr)
library(readr)
library(janitor)
library(DBI)
library(RPostgres)
source("R/col_types_wells.R")
con1 <- DBI::dbConnect(
#RPostgreSQL::PostgreSQL(),
RPostgres::Postgres(),
dbname = Sys.getenv("BCGOV_DB"),
host = Sys.getenv("BCGOV_HOST"),
user = Sys.getenv("BCGOV_USR"),
password = Sys.getenv("BCGOV_PWD")
)

#library(reticulate)
#use_condaenv(condaenv = "gwells_locationqa", required= TRUE)

# col_types_wells (the type of every column in wells.csv) is defined in R/col_types_wells.R, sourced above

# lubridate can return the time in a specific time zone; here are the Vancouver time and date:
#lubridate::with_tz(Sys.time(), "America/Vancouver")
#as.Date(Sys.time() , tz = "America/Vancouver")

# read the freshly downloaded data/wells.csv produced by the python script.
#newest_wells_file <- read_csv("data/wells.csv", col_types = col_types_wells) # col types from R/col_types_wells.R

# actually we aren't going to use the python script here, because we need to read the other csvs in the zip file.
url <- "https://s3.ca-central-1.amazonaws.com/gwells-export/export/v2/gwells.zip"
temp_zip <- tempfile()
download.file(url, destfile = temp_zip)
temp_dir <- tempdir()
utils::unzip(temp_zip, exdir = temp_dir) # files = "well.csv",

newest_wells_file <- read_csv(
  paste0(temp_dir, "/well.csv"), col_types = col_types_wells # from R/col_types_wells.R
)


wells_in_db <- dbGetQuery(con1, "select well_tag_number from wells")

#test <- dbGetQuery(con1, "select * from wells limit 100")
#------------------------------------------------------------
# Update historical characteristics of wells on the day they were added
#------------------------------------------------------------

new_wells <- newest_wells_file %>%
  anti_join(wells_in_db, by = "well_tag_number") %>%
  mutate(date_added = as.Date(Sys.time(), tz = "America/Vancouver")) %>%
  janitor::clean_names()

if (nrow(new_wells) > 0) {
  message("Appending new wells: ", nrow(new_wells), " rows. ", paste(new_wells$well_tag_number, collapse = " "))
  dbAppendTable(con1, "wells", new_wells)
} else {
  message("No new wells to append.")
}


############## do the same thing for the drilling_method file

newest_drilling_method <- read_csv(
paste0(temp_dir, "/drilling_method.csv") ,
col_types = cols(well_tag_number = col_double(), drilling_method_code = col_character())
)

wells_in_drilling_method_db <- dbGetQuery(con1, "select well_tag_number from drilling_method")

new_drilling_method <- newest_drilling_method %>%
  anti_join(wells_in_drilling_method_db, by = "well_tag_number") %>%
  mutate(date_added = as.Date(Sys.time(), tz = "America/Vancouver")) %>%
  janitor::clean_names()


if (nrow(new_drilling_method) > 0) {
  message("Appending new_drilling_method: ", nrow(new_drilling_method), " rows. ", paste(new_drilling_method$well_tag_number, collapse = " "))
  dbAppendTable(con1, "drilling_method", new_drilling_method)
} else {
  message("No new drilling method to append.")
}
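
R/col_types_wells.R is sourced above but its contents are not part of this view; per the comments it defines col_types_wells (the column types for wells.csv), and the later scripts also expect col_types_geocoded and col_types_qa from the same file. A minimal sketch of what such a spec could look like, assuming well_tag_number is numeric and every other column defaults to character (the real column list is an assumption):

# R/col_types_wells.R -- sketch only; the committed version likely lists every column explicitly
library(readr)

col_types_wells <- cols(
  well_tag_number = col_double(),  # the one column name confirmed by the scripts
  .default = col_character()       # assumption: read everything else as character
)

col_types_geocoded <- cols(
  well_tag_number = col_double(),
  .default = col_character()
)

col_types_qa <- cols(
  well_tag_number = col_double(),
  .default = col_character()
)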
26 changes: 26 additions & 0 deletions R/clean_after_geocode.R
@@ -0,0 +1,26 @@
library(dplyr)
library(readr)
library(janitor)
source("R/col_types_wells.R")

library(DBI)
library(RPostgres)
con1 <- DBI::dbConnect(
RPostgres::Postgres(),
dbname = Sys.getenv("BCGOV_DB"),
host = Sys.getenv("BCGOV_HOST"),
user = Sys.getenv("BCGOV_USR"),
password = Sys.getenv("BCGOV_PWD")
)

# this is the list of wells we geocoded today
newly_geocoded <- read_csv("data/wells_geocoded.csv", col_types = col_types_geocoded) %>%
mutate(date_geocoded = as.Date(Sys.time() , tz = "America/Vancouver")) %>%
janitor::clean_names()

if (nrow(newly_geocoded) > 0) {
  message("printing newly geocoded")
  glimpse(newly_geocoded)
  message("Appending newly geocoded wells: ", nrow(newly_geocoded), " rows. well_tag_number=", paste(newly_geocoded$well_tag_number, collapse = " "))
  dbAppendTable(con1, "wells_geocoded", newly_geocoded)
} else {
  message("No new geocoded wells to append.")
}
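
Unlike archive_new_wells.R, this script appends every row of data/wells_geocoded.csv without checking what is already in the database. If re-runs are possible, the same anti_join guard used above could be applied before the append; a sketch, assuming the wells_geocoded table already exists and has a well_tag_number column:

# sketch: skip rows whose well_tag_number is already in wells_geocoded
already_geocoded <- dbGetQuery(con1, "select well_tag_number from wells_geocoded")

newly_geocoded <- newly_geocoded %>%
  anti_join(already_geocoded, by = "well_tag_number")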
37 changes: 37 additions & 0 deletions R/clean_after_qa.R
@@ -0,0 +1,37 @@
library(dplyr)
library(readr)
library(janitor)

source("R/col_types_wells.R")
source("R/col_types_wells.R")

library(DBI)
library(RPostgres)
con1 <- DBI::dbConnect(
RPostgres::Postgres(),
dbname = Sys.getenv("BCGOV_DB"),
host = Sys.getenv("BCGOV_HOST"),
user = Sys.getenv("BCGOV_USR"),
password = Sys.getenv("BCGOV_PWD")
)

old_qa <- dbGetQuery(con1, "select * from wells_qa limit 10")
message("printing out QA")
glimpse(old_qa)


newly_qa <- read_csv("gwells_locationqa.csv", col_types = col_types_qa) %>%
select(-one_of(c("Unnamed: 0", "date_added"))) %>%
mutate(date_qa = as.Date(Sys.time() , tz = "America/Vancouver")) %>%
janitor::clean_names()


if (nrow(newly_qa) > 0) {

  message("printing new qa")
  glimpse(newly_qa)
  message("Appending newly qa wells: ", nrow(newly_qa), " rows. well_tag_number=", paste(newly_qa$well_tag_number, collapse = " "))
  dbAppendTable(con1, "wells_qa", newly_qa)
  message("done")

} else {
  message("No new qa wells to append.")
}
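
After the appends it can be useful to confirm the tables grew as expected and to close the connection; a minimal sketch using the table names from the scripts above (the count query itself is not part of this commit):

# sketch: row-count sanity check after appending, then close the connection
for (tbl in c("wells", "drilling_method", "wells_geocoded", "wells_qa")) {
  n <- DBI::dbGetQuery(con1, paste0("select count(*) as n from ", tbl))$n
  message(tbl, ": ", n, " rows")
}
DBI::dbDisconnect(con1)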
